diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..859149866a1881bab5563c79297d87adf5255c2c
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,54 @@
+__pycache__
+*.ckpt
+*.safetensors
+*.pth
+*.pt
+*.bin
+*.patch
+*.backup
+*.corrupted
+*.partial
+*.onnx
+sorted_styles.json
+/input
+/cache
+/language/default.json
+/test_imgs
+config.txt
+config_modification_tutorial.txt
+user_path_config.txt
+user_path_config-deprecated.txt
+/modules/*.png
+/repositories
+/fooocus_env
+/venv
+/tmp
+/ui-config.json
+/outputs
+/config.json
+/log
+/webui.settings.bat
+/embeddings
+/styles.csv
+/params.txt
+/styles.csv.bak
+/webui-user.bat
+/webui-user.sh
+/interrogate
+/user.css
+/.idea
+/notification.ogg
+/notification.mp3
+/SwinIR
+/textual_inversion
+.vscode
+/extensions
+/test/stdout.txt
+/test/stderr.txt
+/cache.json*
+/config_states/
+/node_modules
+/package-lock.json
+/.coverage*
+/auth.json
+.DS_Store
diff --git a/aiDescTerminal.py b/aiDescTerminal.py
new file mode 100644
index 0000000000000000000000000000000000000000..178d11d73dba8ff9064f05db642713b666772851
--- /dev/null
+++ b/aiDescTerminal.py
@@ -0,0 +1,90 @@
+import os
+import sys
+import numpy as np
+from PIL import Image
+import requests
+from io import BytesIO
+
+root = os.path.dirname(os.path.abspath(__file__))
+sys.path.append(root)
+os.chdir(root)
+
+import modules.config
+import modules.html
+import modules.flags as flags
+import modules.meta_parser
+
+def download_image(url):
+    response = requests.get(url)
+    img = Image.open(BytesIO(response.content)).convert("RGB")
+    return img
+
+def trigger_describe(mode, img_path):
+    print("Running")
+    print("Press Ctrl+C to stop")
+    if mode == flags.desc_type_photo:
+        from extras.interrogate import default_interrogator as default_interrogator_photo
+        if isinstance(img_path, str) and img_path.startswith('http'):
+            img = download_image(img_path)
+        else:
+            img = Image.open(img_path).convert("RGB")
+        # The interrogator expects a NumPy array, so convert the PIL image first
+        return default_interrogator_photo(np.array(img)), ["Fooocus V2", "Fooocus Enhance", "Fooocus Sharp"]
+    elif mode == flags.desc_type_anime:
+        from extras.wd14tagger import default_interrogator as default_interrogator_anime
+        if isinstance(img_path, str) and img_path.startswith('http'):
+            img = download_image(img_path)
+        elif isinstance(img_path, str):
+            # Load the image if the input is a path
+            img = Image.open(img_path).convert("RGB")
+        elif isinstance(img_path, np.ndarray):
+            # Use the provided NumPy array directly
+            img = Image.fromarray(img_path).convert("RGB")
+        else:
+            raise ValueError("Invalid image format. Please provide a valid path or NumPy array.")
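+        # Hypothetical usage notes (not part of the original script): this branch
+        # accepts a local path, a URL, or an already-decoded NumPy array, e.g.
+        #   trigger_describe(flags.desc_type_anime, './imgs/Gambar1.jpg')
+        #   trigger_describe(flags.desc_type_anime, 'https://example.com/a.png')
+        #   trigger_describe(flags.desc_type_anime, np.zeros((64, 64, 3), dtype=np.uint8))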
+
+        # Convert the image to a NumPy array
+        img_array = np.array(img)
+
+        return default_interrogator_anime(img_array), ["Fooocus V2", "Fooocus Masterpiece"]
+    return mode, ["Fooocus V2"]
+
+style_selections = modules.config.default_styles
+
+def run_describe(image_path, content_type):
+    desc_input_image = image_path
+    desc_method = content_type
+
+    result, style_selections = None, None
+
+    if desc_method in ["Photograph", "1", ""]:
+        desc_method = "Photograph (1)"
+        result, style_selections = trigger_describe(flags.desc_type_photo, desc_input_image)
+    elif desc_method in ["Art/Anime", "2"]:
+        desc_method = "Art/Anime (2)"
+        result, style_selections = trigger_describe(flags.desc_type_anime, desc_input_image)
+    else:
+        print("ERROR! Unknown content type.")
+
+    print("Result:", result)
+    # print("Style Selections:", style_selections)
+    quit()
+
+if __name__ == "__main__":
+    desc_input_image = input("Path to Image (local path or URL): ")
+
+    if desc_input_image == "":
+        desc_input_image = "./imgs/Gambar1.jpg"
+
+    print(f"You use: {desc_input_image}")
+
+    desc_method = input(
+        """
+        Select Content Type:
+        Photograph (1)
+        Art/Anime (2)
+        """
+    )
+
+    run_describe(desc_input_image, desc_method)
diff --git a/aiDescUI.py b/aiDescUI.py
new file mode 100644
index 0000000000000000000000000000000000000000..664663e47b01d7a65120bc871f1f86fc90b4e156
--- /dev/null
+++ b/aiDescUI.py
@@ -0,0 +1,688 @@
+import gradio as gr
+import random
+import os
+# import json
+import time
+import shared
+import modules.config
+# import fooocus_version
+import modules.html
+import modules.async_worker as worker
+import modules.constants as constants
+import modules.flags as flags
+import modules.gradio_hijack as grh
+import modules.style_sorter as style_sorter
+import modules.meta_parser
+import args_manager
+import copy
+
+from modules.sdxl_styles import legal_style_names
+from modules.private_logger import get_current_html_path
+from modules.ui_gradio_extensions import reload_javascript
+from modules.auth import auth_enabled, check_auth
+# from modules.util import is_json
+
+# def get_task(*args):
+#     args = list(args)
+#     args.pop(0)
+
+#     return worker.AsyncTask(args=args)
+
+# def generate_clicked(task):
+#     import ldm_patched.modules.model_management as model_management
+
+#     with model_management.interrupt_processing_mutex:
+#         model_management.interrupt_processing = False
+#     # outputs=[progress_html, progress_window, progress_gallery, gallery]
+#     execution_start_time = time.perf_counter()
+#     finished = False
+
+#     yield gr.update(visible=True, value=modules.html.make_progress_html(1, 'Waiting for task to start ...')), \
+#         gr.update(visible=True, value=None), \
+#         gr.update(visible=False, value=None), \
+#         gr.update(visible=False)
+
+#     worker.async_tasks.append(task)
+
+#     while not finished:
+#         time.sleep(0.01)
+#         if len(task.yields) > 0:
+#             flag, product = task.yields.pop(0)
+#             if flag == 'preview':
+
+#                 # help bad internet connection by skipping duplicated preview
+#                 if len(task.yields) > 0:  # if we have the next item
+#                     if task.yields[0][0] == 'preview':  # if the next item is also a preview
+#                         # print('Skipped one preview for better internet connection.')
+#                         continue
+
+#                 percentage, title, image = product
+#                 yield gr.update(visible=True, value=modules.html.make_progress_html(percentage, title)), \
+#                     gr.update(visible=True, value=image) if image is not None else gr.update(), \
+#                     gr.update(), \
+#                     gr.update(visible=False)
+#             if 
flag == 'results': +# yield gr.update(visible=True), \ +# gr.update(visible=True), \ +# gr.update(visible=True, value=product), \ +# gr.update(visible=False) +# if flag == 'finish': +# yield gr.update(visible=False), \ +# gr.update(visible=False), \ +# gr.update(visible=False), \ +# gr.update(visible=True, value=product) +# finished = True + +# # delete Fooocus temp images, only keep gradio temp images +# if args_manager.args.disable_image_log: +# for filepath in product: +# if isinstance(filepath, str) and os.path.exists(filepath): +# os.remove(filepath) + +# execution_time = time.perf_counter() - execution_start_time +# print(f'Total time: {execution_time:.2f} seconds') +# return + + +reload_javascript() + +title = 'AI Describe Image' + +if isinstance(args_manager.args.preset, str): + title += ' ' + args_manager.args.preset + +shared.gradio_root = gr.Blocks( + title=title, + css=modules.html.css).queue() + +with shared.gradio_root: + # currentTask = gr.State(worker.AsyncTask(args=[])) + with gr.Row(): + with gr.Column(scale=2): + # with gr.Row(): + # progress_window = grh.Image(label='Preview', show_label=True, visible=False, height=768, + # elem_classes=['main_view']) + # progress_gallery = gr.Gallery(label='Finished Images', show_label=True, object_fit='contain', + # height=768, visible=False, elem_classes=['main_view', 'image_gallery']) + # progress_html = gr.HTML(value=modules.html.make_progress_html(32, 'Progress 32%'), visible=False, + # elem_id='progress-bar', elem_classes='progress-bar') + # gallery = gr.Gallery(label='Gallery', show_label=False, object_fit='contain', visible=True, height=768, + # elem_classes=['resizable_area', 'main_view', 'final_gallery', 'image_gallery'], + # elem_id='final_gallery') + with gr.Row(visible=True) as image_input_panel: + with gr.Tabs(): + # with gr.TabItem(label='Upscale or Variation') as uov_tab: + # with gr.Row(): + # with gr.Column(): + # uov_input_image = grh.Image(label='Drag above image to here', source='upload', type='numpy') + # with gr.Column(): + # uov_method = gr.Radio(label='Upscale or Variation:', choices=flags.uov_list, value=flags.disabled) + # gr.HTML('\U0001F4D4 Document') + # with gr.TabItem(label='Image Prompt') as ip_tab: + # with gr.Row(): + # ip_images = [] + # ip_types = [] + # ip_stops = [] + # ip_weights = [] + # ip_ctrls = [] + # ip_ad_cols = [] + # for _ in range(flags.controlnet_image_count): + # with gr.Column(): + # ip_image = grh.Image(label='Image', source='upload', type='numpy', show_label=False, height=300) + # ip_images.append(ip_image) + # ip_ctrls.append(ip_image) + # with gr.Column(visible=False) as ad_col: + # with gr.Row(): + # default_end, default_weight = flags.default_parameters[flags.default_ip] + + # ip_stop = gr.Slider(label='Stop At', minimum=0.0, maximum=1.0, step=0.001, value=default_end) + # ip_stops.append(ip_stop) + # ip_ctrls.append(ip_stop) + + # ip_weight = gr.Slider(label='Weight', minimum=0.0, maximum=2.0, step=0.001, value=default_weight) + # ip_weights.append(ip_weight) + # ip_ctrls.append(ip_weight) + + # ip_type = gr.Radio(label='Type', choices=flags.ip_list, value=flags.default_ip, container=False) + # ip_types.append(ip_type) + # ip_ctrls.append(ip_type) + + # ip_type.change(lambda x: flags.default_parameters[x], inputs=[ip_type], outputs=[ip_stop, ip_weight], queue=False, show_progress=False) + # ip_ad_cols.append(ad_col) + # ip_advanced = gr.Checkbox(label='Advanced', value=False, container=False) + # gr.HTML('* \"Image Prompt\" is powered by Fooocus Image Mixture Engine (v1.0.1). 
\U0001F4D4 Document') + + # def ip_advance_checked(x): + # return [gr.update(visible=x)] * len(ip_ad_cols) + \ + # [flags.default_ip] * len(ip_types) + \ + # [flags.default_parameters[flags.default_ip][0]] * len(ip_stops) + \ + # [flags.default_parameters[flags.default_ip][1]] * len(ip_weights) + + # ip_advanced.change(ip_advance_checked, inputs=ip_advanced, + # outputs=ip_ad_cols + ip_types + ip_stops + ip_weights, + # queue=False, show_progress=False) + # with gr.TabItem(label='Inpaint or Outpaint') as inpaint_tab: + # with gr.Row(): + # inpaint_input_image = grh.Image(label='Drag inpaint or outpaint image to here', source='upload', type='numpy', tool='sketch', height=500, brush_color="#FFFFFF", elem_id='inpaint_canvas') + # inpaint_mask_image = grh.Image(label='Mask Upload', source='upload', type='numpy', height=500, visible=False) + + # with gr.Row(): + # inpaint_additional_prompt = gr.Textbox(placeholder="Describe what you want to inpaint.", elem_id='inpaint_additional_prompt', label='Inpaint Additional Prompt', visible=False) + # outpaint_selections = gr.CheckboxGroup(choices=['Left', 'Right', 'Top', 'Bottom'], value=[], label='Outpaint Direction') + # inpaint_mode = gr.Dropdown(choices=modules.flags.inpaint_options, value=modules.flags.inpaint_option_default, label='Method') + # example_inpaint_prompts = gr.Dataset(samples=modules.config.example_inpaint_prompts, label='Additional Prompt Quick List', components=[inpaint_additional_prompt], visible=False) + # gr.HTML('* Powered by Fooocus Inpaint Engine \U0001F4D4 Document') + # example_inpaint_prompts.click(lambda x: x[0], inputs=example_inpaint_prompts, outputs=inpaint_additional_prompt, show_progress=False, queue=False) + with gr.TabItem(label='Describe') as desc_tab: + with gr.Row(): + with gr.Column(): + desc_input_image = grh.Image(label='Drag any image to here', source='upload', type='numpy') + with gr.Column(): + # with gr.Row(elem_classes='type_row'): + with gr.Row(): + prompt = gr.Textbox(label="Output", show_label=True, elem_id='positive_prompt', container=True, autofocus=True, show_copy_button=True, interactive=True) + + default_prompt = modules.config.default_prompt + if isinstance(default_prompt, str) and default_prompt != '': + shared.gradio_root.load(lambda: default_prompt, outputs=prompt) + + # with gr.Column(scale=3, min_width=0): + # generate_button = gr.Button(label="Generate", value="Generate", elem_classes='type_row', elem_id='generate_button', visible=True) + # load_parameter_button = gr.Button(label="Load Parameters", value="Load Parameters", elem_classes='type_row', elem_id='load_parameter_button', visible=False) + # skip_button = gr.Button(label="Skip", value="Skip", elem_classes='type_row_half', visible=False) + # stop_button = gr.Button(label="Stop", value="Stop", elem_classes='type_row_half', elem_id='stop_button', visible=False) + + # def stop_clicked(currentTask): + # import ldm_patched.modules.model_management as model_management + # currentTask.last_stop = 'stop' + # if (currentTask.processing): + # model_management.interrupt_current_processing() + # return currentTask + + # def skip_clicked(currentTask): + # import ldm_patched.modules.model_management as model_management + # currentTask.last_stop = 'skip' + # if (currentTask.processing): + # model_management.interrupt_current_processing() + # return currentTask + + # stop_button.click(stop_clicked, inputs=currentTask, outputs=currentTask, queue=False, show_progress=False, _js='cancelGenerateForever') + # skip_button.click(skip_clicked, 
inputs=currentTask, outputs=currentTask, queue=False, show_progress=False) + # with gr.Row(elem_classes='advanced_check_row'): + # # input_image_checkbox = gr.Checkbox(label='Input Image', value=False, container=False, elem_classes='min_check') + # advanced_checkbox = gr.Checkbox(label='Advanced', value=modules.config.default_advanced_checkbox, container=False, elem_classes='min_check') + with gr.Row(): + desc_method = gr.Radio( + label='Content Type', + choices=[flags.desc_type_photo, flags.desc_type_anime], + value=flags.desc_type_photo) + desc_btn = gr.Button(value='Describe this Image into Prompt') + # gr.HTML('\U0001F4D4 Document') + # with gr.TabItem(label='Metadata') as load_tab: + # with gr.Column(): + # metadata_input_image = grh.Image(label='Drag any image generated by Fooocus here', source='upload', type='filepath') + # metadata_json = gr.JSON(label='Metadata') + # metadata_import_button = gr.Button(value='Apply Metadata') + + # def trigger_metadata_preview(filepath): + # parameters, metadata_scheme = modules.meta_parser.read_info_from_image(filepath) + + # results = {} + # if parameters is not None: + # results['parameters'] = parameters + + # if isinstance(metadata_scheme, flags.MetadataScheme): + # results['metadata_scheme'] = metadata_scheme.value + + # return results + + # metadata_input_image.upload(trigger_metadata_preview, inputs=metadata_input_image, + # outputs=metadata_json, queue=False, show_progress=True) + + switch_js = "(x) => {if(x){viewer_to_bottom(100);viewer_to_bottom(500);}else{viewer_to_top();} return x;}" + down_js = "() => {viewer_to_bottom();}" + + # input_image_checkbox.change(lambda x: gr.update(visible=x), inputs=input_image_checkbox, + # outputs=image_input_panel, queue=False, show_progress=False, _js=switch_js) + # ip_advanced.change(lambda: None, queue=False, show_progress=False, _js=down_js) + + # current_tab = gr.Textbox(value='desc', visible=False) + # # uov_tab.select(lambda: 'uov', outputs=current_tab, queue=False, _js=down_js, show_progress=False) + # # inpaint_tab.select(lambda: 'inpaint', outputs=current_tab, queue=False, _js=down_js, show_progress=False) + # # ip_tab.select(lambda: 'ip', outputs=current_tab, queue=False, _js=down_js, show_progress=False) + # desc_tab.select(lambda: 'desc', outputs=current_tab, queue=False, _js=down_js, show_progress=False) + + # with gr.Column(scale=1, visible=modules.config.default_advanced_checkbox) as advanced_column: + # with gr.Tab(label='Setting'): + # performance_selection = gr.Radio(label='Performance', + # choices=modules.flags.performance_selections, + # value=modules.config.default_performance) + # aspect_ratios_selection = gr.Radio(label='Aspect Ratios', choices=modules.config.available_aspect_ratios, + # value=modules.config.default_aspect_ratio, info='width × height', + # elem_classes='aspect_ratios') + # image_number = gr.Slider(label='Image Number', minimum=1, maximum=modules.config.default_max_image_number, step=1, value=modules.config.default_image_number) + + # output_format = gr.Radio(label='Output Format', + # choices=modules.flags.output_formats, + # value=modules.config.default_output_format) + + # negative_prompt = gr.Textbox(label='Negative Prompt', show_label=True, placeholder="Type prompt here.", + # info='Describing what you do not want to see.', lines=2, + # elem_id='negative_prompt', + # value=modules.config.default_prompt_negative) + # seed_random = gr.Checkbox(label='Random', value=True) + # image_seed = gr.Textbox(label='Seed', value=0, max_lines=1, visible=False) # 
workaround for https://github.com/gradio-app/gradio/issues/5354 + + # def random_checked(r): + # return gr.update(visible=not r) + + # def refresh_seed(r, seed_string): + # if r: + # return random.randint(constants.MIN_SEED, constants.MAX_SEED) + # else: + # try: + # seed_value = int(seed_string) + # if constants.MIN_SEED <= seed_value <= constants.MAX_SEED: + # return seed_value + # except ValueError: + # pass + # return random.randint(constants.MIN_SEED, constants.MAX_SEED) + + # seed_random.change(random_checked, inputs=[seed_random], outputs=[image_seed], + # queue=False, show_progress=False) + + # def update_history_link(): + # if args_manager.args.disable_image_log: + # return gr.update(value='') + + # return gr.update(value=f'\U0001F4DA History Log') + + # history_link = gr.HTML() + # shared.gradio_root.load(update_history_link, outputs=history_link, queue=False, show_progress=False) + + # with gr.Tab(label='Style'): + # style_sorter.try_load_sorted_styles( + # style_names=legal_style_names, + # default_selected=modules.config.default_styles) + + # style_search_bar = gr.Textbox(show_label=False, container=False, + # placeholder="\U0001F50E Type here to search styles ...", + # value="", + # label='Search Styles') + # style_selections = gr.CheckboxGroup(show_label=False, container=False, + # choices=copy.deepcopy(style_sorter.all_styles), + # value=copy.deepcopy(modules.config.default_styles), + # label='Selected Styles', + # elem_classes=['style_selections']) + # gradio_receiver_style_selections = gr.Textbox(elem_id='gradio_receiver_style_selections', visible=False) + + # shared.gradio_root.load(lambda: gr.update(choices=copy.deepcopy(style_sorter.all_styles)), + # outputs=style_selections) + + # style_search_bar.change(style_sorter.search_styles, + # inputs=[style_selections, style_search_bar], + # outputs=style_selections, + # queue=False, + # show_progress=False).then( + # lambda: None, _js='()=>{refresh_style_localization();}') + + # gradio_receiver_style_selections.input(style_sorter.sort_styles, + # inputs=style_selections, + # outputs=style_selections, + # queue=False, + # show_progress=False).then( + # lambda: None, _js='()=>{refresh_style_localization();}') + + # with gr.Tab(label='Model'): + # with gr.Group(): + # with gr.Row(): + # base_model = gr.Dropdown(label='Base Model (SDXL only)', choices=modules.config.model_filenames, value=modules.config.default_base_model_name, show_label=True) + # refiner_model = gr.Dropdown(label='Refiner (SDXL or SD 1.5)', choices=['None'] + modules.config.model_filenames, value=modules.config.default_refiner_model_name, show_label=True) + + # refiner_switch = gr.Slider(label='Refiner Switch At', minimum=0.1, maximum=1.0, step=0.0001, + # info='Use 0.4 for SD1.5 realistic models; ' + # 'or 0.667 for SD1.5 anime models; ' + # 'or 0.8 for XL-refiners; ' + # 'or any value for switching two SDXL models.', + # value=modules.config.default_refiner_switch, + # visible=modules.config.default_refiner_model_name != 'None') + + # refiner_model.change(lambda x: gr.update(visible=x != 'None'), + # inputs=refiner_model, outputs=refiner_switch, show_progress=False, queue=False) + + # with gr.Group(): + # lora_ctrls = [] + + # for i, (n, v) in enumerate(modules.config.default_loras): + # with gr.Row(): + # lora_enabled = gr.Checkbox(label='Enable', value=True, + # elem_classes=['lora_enable', 'min_check'], scale=1) + # lora_model = gr.Dropdown(label=f'LoRA {i + 1}', + # choices=['None'] + modules.config.lora_filenames, value=n, + # elem_classes='lora_model', 
scale=5) + # lora_weight = gr.Slider(label='Weight', minimum=modules.config.default_loras_min_weight, + # maximum=modules.config.default_loras_max_weight, step=0.01, value=v, + # elem_classes='lora_weight', scale=5) + # lora_ctrls += [lora_enabled, lora_model, lora_weight] + + # with gr.Row(): + # model_refresh = gr.Button(label='Refresh', value='\U0001f504 Refresh All Files', variant='secondary', elem_classes='refresh_button') + # with gr.Tab(label='Advanced'): + # guidance_scale = gr.Slider(label='Guidance Scale', minimum=1.0, maximum=30.0, step=0.01, + # value=modules.config.default_cfg_scale, + # info='Higher value means style is cleaner, vivider, and more artistic.') + # sharpness = gr.Slider(label='Image Sharpness', minimum=0.0, maximum=30.0, step=0.001, + # value=modules.config.default_sample_sharpness, + # info='Higher value means image and texture are sharper.') + # gr.HTML('\U0001F4D4 Document') + # dev_mode = gr.Checkbox(label='Developer Debug Mode', value=False, container=False) + + # with gr.Column(visible=False) as dev_tools: + # with gr.Tab(label='Debug Tools'): + # adm_scaler_positive = gr.Slider(label='Positive ADM Guidance Scaler', minimum=0.1, maximum=3.0, + # step=0.001, value=1.5, info='The scaler multiplied to positive ADM (use 1.0 to disable). ') + # adm_scaler_negative = gr.Slider(label='Negative ADM Guidance Scaler', minimum=0.1, maximum=3.0, + # step=0.001, value=0.8, info='The scaler multiplied to negative ADM (use 1.0 to disable). ') + # adm_scaler_end = gr.Slider(label='ADM Guidance End At Step', minimum=0.0, maximum=1.0, + # step=0.001, value=0.3, + # info='When to end the guidance from positive/negative ADM. ') + + # refiner_swap_method = gr.Dropdown(label='Refiner swap method', value=flags.refiner_swap_method, + # choices=['joint', 'separate', 'vae']) + + # adaptive_cfg = gr.Slider(label='CFG Mimicking from TSNR', minimum=1.0, maximum=30.0, step=0.01, + # value=modules.config.default_cfg_tsnr, + # info='Enabling Fooocus\'s implementation of CFG mimicking for TSNR ' + # '(effective when real CFG > mimicked CFG).') + # sampler_name = gr.Dropdown(label='Sampler', choices=flags.sampler_list, + # value=modules.config.default_sampler) + # scheduler_name = gr.Dropdown(label='Scheduler', choices=flags.scheduler_list, + # value=modules.config.default_scheduler) + + # generate_image_grid = gr.Checkbox(label='Generate Image Grid for Each Batch', + # info='(Experimental) This may cause performance problems on some computers and certain internet conditions.', + # value=False) + + # overwrite_step = gr.Slider(label='Forced Overwrite of Sampling Step', + # minimum=-1, maximum=200, step=1, + # value=modules.config.default_overwrite_step, + # info='Set as -1 to disable. For developer debugging.') + # overwrite_switch = gr.Slider(label='Forced Overwrite of Refiner Switch Step', + # minimum=-1, maximum=200, step=1, + # value=modules.config.default_overwrite_switch, + # info='Set as -1 to disable. For developer debugging.') + # overwrite_width = gr.Slider(label='Forced Overwrite of Generating Width', + # minimum=-1, maximum=2048, step=1, value=-1, + # info='Set as -1 to disable. For developer debugging. ' + # 'Results will be worse for non-standard numbers that SDXL is not trained on.') + # overwrite_height = gr.Slider(label='Forced Overwrite of Generating Height', + # minimum=-1, maximum=2048, step=1, value=-1, + # info='Set as -1 to disable. For developer debugging. 
' + # 'Results will be worse for non-standard numbers that SDXL is not trained on.') + # overwrite_vary_strength = gr.Slider(label='Forced Overwrite of Denoising Strength of "Vary"', + # minimum=-1, maximum=1.0, step=0.001, value=-1, + # info='Set as negative number to disable. For developer debugging.') + # overwrite_upscale_strength = gr.Slider(label='Forced Overwrite of Denoising Strength of "Upscale"', + # minimum=-1, maximum=1.0, step=0.001, value=-1, + # info='Set as negative number to disable. For developer debugging.') + # disable_preview = gr.Checkbox(label='Disable Preview', value=False, + # info='Disable preview during generation.') + # disable_intermediate_results = gr.Checkbox(label='Disable Intermediate Results', + # value=modules.config.default_performance == 'Extreme Speed', + # interactive=modules.config.default_performance != 'Extreme Speed', + # info='Disable intermediate results during generation, only show final gallery.') + # disable_seed_increment = gr.Checkbox(label='Disable seed increment', + # info='Disable automatic seed increment when image number is > 1.', + # value=False) + + # # if not args_manager.args.disable_metadata: + # # save_metadata_to_images = gr.Checkbox(label='Save Metadata to Images', value=modules.config.default_save_metadata_to_images, + # # info='Adds parameters to generated images allowing manual regeneration.') + # # metadata_scheme = gr.Radio(label='Metadata Scheme', choices=flags.metadata_scheme, value=modules.config.default_metadata_scheme, + # # info='Image Prompt parameters are not included. Use png and a1111 for compatibility with Civitai.', + # # visible=modules.config.default_save_metadata_to_images) + + # # save_metadata_to_images.change(lambda x: gr.update(visible=x), inputs=[save_metadata_to_images], outputs=[metadata_scheme], + # # queue=False, show_progress=False) + + # # with gr.Tab(label='Control'): + # # debugging_cn_preprocessor = gr.Checkbox(label='Debug Preprocessors', value=False, + # # info='See the results from preprocessors.') + # # skipping_cn_preprocessor = gr.Checkbox(label='Skip Preprocessors', value=False, + # # info='Do not preprocess images. (Inputs are already canny/depth/cropped-face/etc.)') + + # # mixing_image_prompt_and_vary_upscale = gr.Checkbox(label='Mixing Image Prompt and Vary/Upscale', + # # value=False) + # # mixing_image_prompt_and_inpaint = gr.Checkbox(label='Mixing Image Prompt and Inpaint', + # # value=False) + + # # controlnet_softness = gr.Slider(label='Softness of ControlNet', minimum=0.0, maximum=1.0, + # # step=0.001, value=0.25, + # # info='Similar to the Control Mode in A1111 (use 0.0 to disable). 
') + + # # with gr.Tab(label='Canny'): + # # canny_low_threshold = gr.Slider(label='Canny Low Threshold', minimum=1, maximum=255, + # # step=1, value=64) + # # canny_high_threshold = gr.Slider(label='Canny High Threshold', minimum=1, maximum=255, + # # step=1, value=128) + + # # with gr.Tab(label='Inpaint'): + # # debugging_inpaint_preprocessor = gr.Checkbox(label='Debug Inpaint Preprocessing', value=False) + # # inpaint_disable_initial_latent = gr.Checkbox(label='Disable initial latent in inpaint', value=False) + # # inpaint_engine = gr.Dropdown(label='Inpaint Engine', + # # value=modules.config.default_inpaint_engine_version, + # # choices=flags.inpaint_engine_versions, + # # info='Version of Fooocus inpaint model') + # # inpaint_strength = gr.Slider(label='Inpaint Denoising Strength', + # # minimum=0.0, maximum=1.0, step=0.001, value=1.0, + # # info='Same as the denoising strength in A1111 inpaint. ' + # # 'Only used in inpaint, not used in outpaint. ' + # # '(Outpaint always use 1.0)') + # # inpaint_respective_field = gr.Slider(label='Inpaint Respective Field', + # # minimum=0.0, maximum=1.0, step=0.001, value=0.618, + # # info='The area to inpaint. ' + # # 'Value 0 is same as "Only Masked" in A1111. ' + # # 'Value 1 is same as "Whole Image" in A1111. ' + # # 'Only used in inpaint, not used in outpaint. ' + # # '(Outpaint always use 1.0)') + # # inpaint_erode_or_dilate = gr.Slider(label='Mask Erode or Dilate', + # # minimum=-64, maximum=64, step=1, value=0, + # # info='Positive value will make white area in the mask larger, ' + # # 'negative value will make white area smaller.' + # # '(default is 0, always process before any mask invert)') + # # inpaint_mask_upload_checkbox = gr.Checkbox(label='Enable Mask Upload', value=False) + # # invert_mask_checkbox = gr.Checkbox(label='Invert Mask', value=False) + + # # inpaint_ctrls = [debugging_inpaint_preprocessor, inpaint_disable_initial_latent, inpaint_engine, + # # inpaint_strength, inpaint_respective_field, + # # inpaint_mask_upload_checkbox, invert_mask_checkbox, inpaint_erode_or_dilate] + + # # inpaint_mask_upload_checkbox.change(lambda x: gr.update(visible=x), + # # inputs=inpaint_mask_upload_checkbox, + # # outputs=inpaint_mask_image, queue=False, show_progress=False) + + # with gr.Tab(label='FreeU'): + # freeu_enabled = gr.Checkbox(label='Enabled', value=False) + # freeu_b1 = gr.Slider(label='B1', minimum=0, maximum=2, step=0.01, value=1.01) + # freeu_b2 = gr.Slider(label='B2', minimum=0, maximum=2, step=0.01, value=1.02) + # freeu_s1 = gr.Slider(label='S1', minimum=0, maximum=4, step=0.01, value=0.99) + # freeu_s2 = gr.Slider(label='S2', minimum=0, maximum=4, step=0.01, value=0.95) + # freeu_ctrls = [freeu_enabled, freeu_b1, freeu_b2, freeu_s1, freeu_s2] + + # def dev_mode_checked(r): + # return gr.update(visible=r) + + + # dev_mode.change(dev_mode_checked, inputs=[dev_mode], outputs=[dev_tools], + # queue=False, show_progress=False) + + # def model_refresh_clicked(): + # modules.config.update_all_model_names() + # results = [gr.update(choices=modules.config.model_filenames)] + # results += [gr.update(choices=['None'] + modules.config.model_filenames)] + # for i in range(modules.config.default_max_lora_number): + # results += [gr.update(interactive=True), gr.update(choices=['None'] + modules.config.lora_filenames), gr.update()] + # return results + + # model_refresh.click(model_refresh_clicked, [], [base_model, refiner_model] + lora_ctrls, + # queue=False, show_progress=False) + + # performance_selection.change(lambda x: 
[gr.update(interactive=x != 'Extreme Speed')] * 11 + + # [gr.update(visible=x != 'Extreme Speed')] * 1 + + # [gr.update(interactive=x != 'Extreme Speed', value=x == 'Extreme Speed', )] * 1, + # inputs=performance_selection, + # outputs=[ + # guidance_scale, sharpness, adm_scaler_end, adm_scaler_positive, + # adm_scaler_negative, refiner_switch, refiner_model, sampler_name, + # scheduler_name, adaptive_cfg, refiner_swap_method, negative_prompt, disable_intermediate_results + # ], queue=False, show_progress=False) + + # output_format.input(lambda x: gr.update(output_format=x), inputs=output_format) + + # advanced_checkbox.change(lambda x: gr.update(visible=x), advanced_checkbox, advanced_column, + # queue=False, show_progress=False) \ + # .then(fn=lambda: None, _js='refresh_grid_delayed', queue=False, show_progress=False) + + # def inpaint_mode_change(mode): + # assert mode in modules.flags.inpaint_options + + # # inpaint_additional_prompt, outpaint_selections, example_inpaint_prompts, + # # inpaint_disable_initial_latent, inpaint_engine, + # # inpaint_strength, inpaint_respective_field + + # if mode == modules.flags.inpaint_option_detail: + # return [ + # gr.update(visible=True), gr.update(visible=False, value=[]), + # gr.Dataset.update(visible=True, samples=modules.config.example_inpaint_prompts), + # False, 'None', 0.5, 0.0 + # ] + + # if mode == modules.flags.inpaint_option_modify: + # return [ + # gr.update(visible=True), gr.update(visible=False, value=[]), + # gr.Dataset.update(visible=False, samples=modules.config.example_inpaint_prompts), + # True, modules.config.default_inpaint_engine_version, 1.0, 0.0 + # ] + + # return [ + # gr.update(visible=False, value=''), gr.update(visible=True), + # gr.Dataset.update(visible=False, samples=modules.config.example_inpaint_prompts), + # False, modules.config.default_inpaint_engine_version, 1.0, 0.618 + # ] + + # inpaint_mode.input(inpaint_mode_change, inputs=inpaint_mode, outputs=[ + # inpaint_additional_prompt, outpaint_selections, example_inpaint_prompts, + # inpaint_disable_initial_latent, inpaint_engine, + # inpaint_strength, inpaint_respective_field + # ], show_progress=False, queue=False) + + # ctrls = [currentTask, generate_image_grid] + # ctrls += [ + # prompt, negative_prompt, style_selections, + # performance_selection, aspect_ratios_selection, image_number, output_format, image_seed, sharpness, guidance_scale + # ] + + # ctrls += [base_model, refiner_model, refiner_switch] + lora_ctrls + # # ctrls += [input_image_checkbox, current_tab] + # # ctrls += [uov_method, uov_input_image] + # # ctrls += [outpaint_selections, inpaint_input_image, inpaint_additional_prompt, inpaint_mask_image] + # ctrls += [disable_preview, disable_intermediate_results, disable_seed_increment] + # ctrls += [adm_scaler_positive, adm_scaler_negative, adm_scaler_end, adaptive_cfg] + # ctrls += [sampler_name, scheduler_name] + # ctrls += [overwrite_step, overwrite_switch, overwrite_width, overwrite_height, overwrite_vary_strength] + # ctrls += [overwrite_upscale_strength, mixing_image_prompt_and_vary_upscale, mixing_image_prompt_and_inpaint] + # ctrls += [debugging_cn_preprocessor, skipping_cn_preprocessor, canny_low_threshold, canny_high_threshold] + # ctrls += [refiner_swap_method, controlnet_softness] + # ctrls += freeu_ctrls + # ctrls += inpaint_ctrls + + # if not args_manager.args.disable_metadata: + # ctrls += [save_metadata_to_images, metadata_scheme] + + # ctrls += ip_ctrls + + # state_is_generating = gr.State(False) + + # def parse_meta(raw_prompt_txt, 
is_generating): + # loaded_json = None + # if is_json(raw_prompt_txt): + # loaded_json = json.loads(raw_prompt_txt) + + # if loaded_json is None: + # if is_generating: + # return gr.update(), gr.update(), gr.update() + # else: + # return gr.update(), gr.update(visible=True), gr.update(visible=False) + + # return json.dumps(loaded_json), gr.update(visible=False), gr.update(visible=True) + + # prompt.input(parse_meta, inputs=[prompt, state_is_generating], outputs=[prompt, generate_button, load_parameter_button], queue=False, show_progress=False) + + # load_data_outputs = [advanced_checkbox, image_number, prompt, negative_prompt, style_selections, + # performance_selection, overwrite_step, overwrite_switch, aspect_ratios_selection, + # overwrite_width, overwrite_height, guidance_scale, sharpness, adm_scaler_positive, + # adm_scaler_negative, adm_scaler_end, refiner_swap_method, adaptive_cfg, base_model, + # refiner_model, refiner_switch, sampler_name, scheduler_name, seed_random, image_seed, + # generate_button, load_parameter_button] + freeu_ctrls + lora_ctrls + + # load_parameter_button.click(modules.meta_parser.load_parameter_button_click, inputs=[prompt, state_is_generating], outputs=load_data_outputs, queue=False, show_progress=False) + + # # def trigger_metadata_import(filepath, state_is_generating): + # # parameters, metadata_scheme = modules.meta_parser.read_info_from_image(filepath) + # # if parameters is None: + # # print('Could not find metadata in the image!') + # # parsed_parameters = {} + # # else: + # # metadata_parser = modules.meta_parser.get_metadata_parser(metadata_scheme) + # # parsed_parameters = metadata_parser.parse_json(parameters) + + # # return modules.meta_parser.load_parameter_button_click(parsed_parameters, state_is_generating) + + + # # metadata_import_button.click(trigger_metadata_import, inputs=[metadata_input_image, state_is_generating], outputs=load_data_outputs, queue=False, show_progress=True) \ + # # .then(style_sorter.sort_styles, inputs=style_selections, outputs=style_selections, queue=False, show_progress=False) + + # generate_button.click(lambda: (gr.update(visible=True, interactive=True), gr.update(visible=True, interactive=True), gr.update(visible=False, interactive=False), [], True), + # outputs=[stop_button, skip_button, generate_button, gallery, state_is_generating]) \ + # .then(fn=refresh_seed, inputs=[seed_random, image_seed], outputs=image_seed) \ + # .then(fn=get_task, inputs=ctrls, outputs=currentTask) \ + # .then(fn=generate_clicked, inputs=currentTask, outputs=[progress_html, progress_window, progress_gallery, gallery]) \ + # .then(lambda: (gr.update(visible=True, interactive=True), gr.update(visible=False, interactive=False), gr.update(visible=False, interactive=False), False), + # outputs=[generate_button, stop_button, skip_button, state_is_generating]) \ + # .then(fn=update_history_link, outputs=history_link) \ + # .then(fn=lambda: None, _js='playNotification').then(fn=lambda: None, _js='refresh_grid_delayed') + + for notification_file in ['notification.ogg', 'notification.mp3']: + if os.path.exists(notification_file): + gr.Audio(interactive=False, value=notification_file, elem_id='audio_notification', visible=False) + break + + def trigger_describe(mode, img): + if mode == flags.desc_type_photo: + from extras.interrogate import default_interrogator as default_interrogator_photo + return default_interrogator_photo(img), ["Fooocus V2", "Fooocus Enhance", "Fooocus Sharp"] + if mode == flags.desc_type_anime: + from extras.wd14tagger import 
default_interrogator as default_interrogator_anime
+            return default_interrogator_anime(img), ["Fooocus V2", "Fooocus Masterpiece"]
+        return mode, ["Fooocus V2"]
+
+    # trigger_describe returns (prompt_text, styles); only the prompt textbox is
+    # wired to this click handler, so pass just the first element through.
+    desc_btn.click(lambda mode, img: trigger_describe(mode, img)[0],
+                   inputs=[desc_method, desc_input_image],
+                   outputs=prompt, show_progress=True, queue=True)
+
+
+def dump_default_english_config():
+    from modules.localization import dump_english_config
+    dump_english_config(grh.all_components)
+
+
+# dump_default_english_config()
+
+shared.gradio_root.launch(
+    inbrowser=args_manager.args.in_browser,
+    server_name=args_manager.args.listen,
+    server_port=args_manager.args.port,
+    share=args_manager.args.share,
+    auth=check_auth if (args_manager.args.share or args_manager.args.listen) and auth_enabled else None,
+    allowed_paths=[modules.config.path_outputs],
+    blocked_paths=[constants.AUTH_FILENAME]
+)
diff --git a/app.html b/app.html
new file mode 100644
index 0000000000000000000000000000000000000000..4eb28948436470e8d4ee7f1ad62fd9615cb1ea77
--- /dev/null
+++ b/app.html
@@ -0,0 +1,6 @@
+
+
+
\ No newline at end of file
diff --git a/app.py b/app.py
new file mode 100644
index 0000000000000000000000000000000000000000..95b1eb3ed05025a85121c2b34e55e01a48c2e654
--- /dev/null
+++ b/app.py
@@ -0,0 +1,67 @@
+print("Wait..")
+
+def test():
+    import gradio as gr
+
+    def analyze_text(text):
+        # Analyze or process the text here
+        result = f"You entered the text: {text}"
+        return result
+
+    iface = gr.Interface(
+        fn=analyze_text,
+        inputs=gr.Textbox(),  # Use a textbox input
+        outputs="text"  # Set the output type to text
+    )
+
+    iface.launch()
+
+def process():
+    import subprocess
+
+    def uninstall_and_install_gradio(version):
+        # Uninstall current Gradio
+        uninstall_command = ["pip", "uninstall", "gradio", "-y"]
+        subprocess.run(uninstall_command)
+
+        # Install specific version of Gradio
+        install_command = ["pip", "install", f"gradio=={version}"]
+        subprocess.run(install_command)
+
+    # Replace "3.41.2" with the desired Gradio version
+    desired_version = "3.41.2"
+
+    # Check the installed Gradio version
+    current_version_command = ["pip", "show", "gradio"]
+    result = subprocess.run(current_version_command, capture_output=True, text=True)
+    current_version = None
+
+    if "Version" in result.stdout:
+        # keep only the version number itself, not the rest of the pip output
+        current_version = result.stdout.split("Version:")[1].split("\n")[0].strip()
+
+    # Uninstall and reinstall if the installed version does not match
+    if current_version != desired_version:
+        uninstall_and_install_gradio(desired_version)
+        print(f"Gradio has been updated to version {desired_version}")
+    else:
+        print(f"Gradio is already at version {desired_version}")
+
+    python_script = "entry_with_update.py"
+
+    # Arguments you want to add
+    # additional_arguments = ["--in-browser", "--all-in-fp32", "--directml", "--debug-mode", "--multi-user", "--always-cpu", "--is-windows-embedded-python"]
+    additional_arguments = ["--always-cpu"]
+
+    # Combine all arguments
+    PIP = ["pip", "install", "-r", "requirements.txt"]
+    command = ["python", python_script] + additional_arguments
+
+    # Run the installation using subprocess
+    subprocess.run(PIP)
+    print("Installing..")
+
+    subprocess.run(command)  # Run the entry script
+    print("Running..")
+    # subprocess.run([batch_file_path], shell=True)
+
+process()
\ No newline at end of file
diff --git a/args_manager.py b/args_manager.py
new file mode 100644
index 0000000000000000000000000000000000000000..c7c1b7ab113c81bdfa3f37ba8a4775ca882bab18
--- /dev/null
+++ b/args_manager.py
@@ -0,0 +1,55 @@
+import 
ldm_patched.modules.args_parser as args_parser +import os + +from tempfile import gettempdir + +args_parser.parser.add_argument("--share", action='store_true', help="Set whether to share on Gradio.") +args_parser.parser.add_argument("--preset", type=str, default=None, help="Apply specified UI preset.") + +args_parser.parser.add_argument("--language", type=str, default='default', + help="Translate UI using json files in [language] folder. " + "For example, [--language example] will use [language/example.json] for translation.") + +# For example, https://github.com/lllyasviel/Fooocus/issues/849 +args_parser.parser.add_argument("--disable-offload-from-vram", action="store_true", + help="Force loading models to vram when the unload can be avoided. " + "Some Mac users may need this.") + +args_parser.parser.add_argument("--theme", type=str, help="launches the UI with light or dark theme", default=None) +args_parser.parser.add_argument("--disable-image-log", action='store_true', + help="Prevent writing images and logs to hard drive.") + +args_parser.parser.add_argument("--disable-analytics", action='store_true', + help="Disables analytics for Gradio.") + +args_parser.parser.add_argument("--disable-metadata", action='store_true', + help="Disables saving metadata to images.") + +args_parser.parser.add_argument("--disable-preset-download", action='store_true', + help="Disables downloading models for presets", default=False) + +args_parser.parser.add_argument("--always-download-new-model", action='store_true', + help="Always download newer models ", default=False) + +args_parser.parser.set_defaults( + disable_cuda_malloc=True, + in_browser=True, + port=None +) + +args_parser.args = args_parser.parser.parse_args() + +# (Disable by default because of issues like https://github.com/lllyasviel/Fooocus/issues/724) +args_parser.args.always_offload_from_vram = not args_parser.args.disable_offload_from_vram + +if args_parser.args.disable_analytics: + import os + os.environ["GRADIO_ANALYTICS_ENABLED"] = "False" + +if args_parser.args.disable_in_browser: + args_parser.args.in_browser = False + +if args_parser.args.temp_path is None: + args_parser.args.temp_path = os.path.join(gettempdir(), 'Fooocus') + +args = args_parser.args diff --git a/auth-example.json b/auth-example.json new file mode 100644 index 0000000000000000000000000000000000000000..59e321d01a4c8424c782daad82b04cd9cf8db8e5 --- /dev/null +++ b/auth-example.json @@ -0,0 +1,6 @@ +[ + { + "user": "sitting-duck-1", + "pass": "very-bad-publicly-known-password-change-it" + } +] diff --git a/auth.json b/auth.json new file mode 100644 index 0000000000000000000000000000000000000000..b719eef5ceb22d538a4b3d14c4637f0eb9fd8523 --- /dev/null +++ b/auth.json @@ -0,0 +1,6 @@ +[ + { + "user": "user123", + "pass": "pass123" + } +] diff --git a/build_launcher.py b/build_launcher.py new file mode 100644 index 0000000000000000000000000000000000000000..4443888b20c595d5233405d614f104bd991fe93c --- /dev/null +++ b/build_launcher.py @@ -0,0 +1,26 @@ +import os + +win32_root = os.path.dirname(os.path.dirname(__file__)) +python_embeded_path = os.path.join(win32_root, 'python_embeded') + +is_win32_standalone_build = os.path.exists(python_embeded_path) and os.path.isdir(python_embeded_path) + +win32_cmd = ''' +.\python_embeded\python.exe -s Fooocus\entry_with_update.py {cmds} %* +pause +''' + + +def build_launcher(): + if not is_win32_standalone_build: + return + + presets = [None, 'anime', 'realistic'] + + for preset in presets: + win32_cmd_preset = win32_cmd.replace('{cmds}', 
'' if preset is None else f'--preset {preset}') + bat_path = os.path.join(win32_root, 'run.bat' if preset is None else f'run_{preset}.bat') + if not os.path.exists(bat_path): + with open(bat_path, "w", encoding="utf-8") as f: + f.write(win32_cmd_preset) + return diff --git a/config.txt b/config.txt new file mode 100644 index 0000000000000000000000000000000000000000..57fadd354e548f4f5c79b1daad67df37376dcf79 --- /dev/null +++ b/config.txt @@ -0,0 +1,16 @@ +{ + "path_checkpoints": [ + "models\\checkpoints" + ], + "path_loras": [ + "models\\loras" + ], + "path_embeddings": "models\\embeddings", + "path_vae_approx": "models\\vae_approx", + "path_upscale_models": "models\\upscale_models", + "path_inpaint": "models\\inpaint", + "path_controlnet": "models\\controlnet", + "path_clip_vision": "models\\clip_vision", + "path_fooocus_expansion": "models\\prompt_expansion\\fooocus_expansion", + "path_outputs": "outputs" +} \ No newline at end of file diff --git a/config_modification_tutorial.txt b/config_modification_tutorial.txt new file mode 100644 index 0000000000000000000000000000000000000000..f6a40b3437431f4874e813e17d915c2155dd6ce9 --- /dev/null +++ b/config_modification_tutorial.txt @@ -0,0 +1,123 @@ +You can modify your "D:\\ADITYA FILE\\Developer\\MICROSOFT\\Microsoft Visual Studio Code\\Project\\Application Website\\Nyxel\\Flowly AI\\My Project\\AI Image\\config.txt" using the below keys, formats, and examples. +Do not modify this file. Modifications in this file will not take effect. +This file is a tutorial and example. Please edit "D:\\ADITYA FILE\\Developer\\MICROSOFT\\Microsoft Visual Studio Code\\Project\\Application Website\\Nyxel\\Flowly AI\\My Project\\AI Image\\config.txt" to really change any settings. +Remember to split the paths with "\\" rather than "\", and there is no "," before the last "}". 
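+For example, a hypothetical entry that points "path_checkpoints" at a custom folder on drive D would be written with doubled backslashes, like this:
+
+{
+    "path_checkpoints": [
+        "D:\\my_models\\checkpoints"
+    ]
+}
+
+Writing the same path with single backslashes ("D:\my_models\checkpoints") would make the file invalid JSON.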
+ + +{ + "path_checkpoints": [ + "models\\checkpoints" + ], + "path_loras": [ + "models\\loras" + ], + "path_embeddings": "models\\embeddings", + "path_vae_approx": "models\\vae_approx", + "path_upscale_models": "models\\upscale_models", + "path_inpaint": "models\\inpaint", + "path_controlnet": "models\\controlnet", + "path_clip_vision": "models\\clip_vision", + "path_fooocus_expansion": "models\\prompt_expansion\\fooocus_expansion", + "path_outputs": "outputs", + "default_model": "juggernautXL_v8Rundiffusion.safetensors", + "previous_default_models": [ + "juggernautXL_version8Rundiffusion.safetensors", + "juggernautXL_version7Rundiffusion.safetensors", + "juggernautXL_v7Rundiffusion.safetensors", + "juggernautXL_version6Rundiffusion.safetensors", + "juggernautXL_v6Rundiffusion.safetensors" + ], + "default_refiner": "None", + "default_refiner_switch": 0.5, + "default_loras_min_weight": -2, + "default_loras_max_weight": 2, + "default_loras": [ + [ + "sd_xl_offset_example-lora_1.0.safetensors", + 0.1 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ] + ], + "default_max_lora_number": 5, + "default_cfg_scale": 4.0, + "default_sample_sharpness": 2.0, + "default_sampler": "dpmpp_2m_sde_gpu", + "default_scheduler": "karras", + "default_styles": [ + "Fooocus V2", + "Fooocus Enhance", + "Fooocus Sharp" + ], + "default_prompt_negative": "", + "default_prompt": "", + "default_performance": "Speed", + "default_advanced_checkbox": false, + "default_max_image_number": 32, + "default_output_format": "png", + "default_image_number": 2, + "checkpoint_downloads": { + "juggernautXL_v8Rundiffusion.safetensors": "https://huggingface.co/lllyasviel/fav_models/resolve/main/fav/juggernautXL_v8Rundiffusion.safetensors" + }, + "lora_downloads": { + "sd_xl_offset_example-lora_1.0.safetensors": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_offset_example-lora_1.0.safetensors" + }, + "embeddings_downloads": {}, + "available_aspect_ratios": [ + "704*1408", + "704*1344", + "768*1344", + "768*1280", + "832*1216", + "832*1152", + "896*1152", + "896*1088", + "960*1088", + "960*1024", + "1024*1024", + "1024*960", + "1088*960", + "1088*896", + "1152*896", + "1152*832", + "1216*832", + "1280*768", + "1344*768", + "1344*704", + "1408*704", + "1472*704", + "1536*640", + "1600*640", + "1664*576", + "1728*576" + ], + "default_aspect_ratio": "1152*896", + "default_inpaint_engine_version": "v2.6", + "default_cfg_tsnr": 7.0, + "default_overwrite_step": -1, + "default_overwrite_switch": -1, + "example_inpaint_prompts": [ + "highly detailed face", + "detailed girl face", + "detailed man face", + "detailed hand", + "beautiful eyes" + ], + "default_save_metadata_to_images": true, + "default_metadata_scheme": "fooocus", + "metadata_created_by": "" +} \ No newline at end of file diff --git a/css/style.css b/css/style.css new file mode 100644 index 0000000000000000000000000000000000000000..010c8e7f6def1663633610755d84049978b97072 --- /dev/null +++ b/css/style.css @@ -0,0 +1,220 @@ +/* based on https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/v1.6.0/style.css */ + +#context-menu{ + z-index:9999; + position:absolute; + display:block; + padding:0px 0; + border:2px solid #a55000; + border-radius:8px; + box-shadow:1px 1px 2px #CE6400; + width: 200px; +} + +.context-menu-items{ + list-style: none; + margin: 0; + padding: 0; +} + +.context-menu-items a{ + display:block; + padding:5px; + cursor:pointer; +} + +.context-menu-items a:hover{ + 
background: #a55000; +} + +.canvas-tooltip-info { + position: absolute; + top: 28px; + left: 2px; + cursor: help; + background-color: rgba(0, 0, 0, 0.3); + width: 20px; + height: 20px; + border-radius: 50%; + display: flex; + align-items: center; + justify-content: center; + flex-direction: column; + + z-index: 100; +} + +.canvas-tooltip-info::after { + content: ''; + display: block; + width: 2px; + height: 7px; + background-color: white; + margin-top: 2px; +} + +.canvas-tooltip-info::before { + content: ''; + display: block; + width: 2px; + height: 2px; + background-color: white; +} + +.canvas-tooltip-content { + display: none; + background-color: #f9f9f9; + color: #333; + border: 1px solid #ddd; + padding: 15px; + position: absolute; + top: 40px; + left: 10px; + width: 250px; + font-size: 16px; + opacity: 0; + border-radius: 8px; + box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2); + + z-index: 100; +} + +.canvas-tooltip:hover .canvas-tooltip-content { + display: block; + animation: fadeIn 0.5s; + opacity: 1; +} + +@keyframes fadeIn { + from {opacity: 0;} + to {opacity: 1;} +} + +.styler { + overflow:inherit !important; +} + +.gradio-container{ + overflow: visible; +} + +/* fullpage image viewer */ + +#lightboxModal{ + display: none; + position: fixed; + z-index: 1001; + left: 0; + top: 0; + width: 100%; + height: 100%; + overflow: auto; + background-color: rgba(20, 20, 20, 0.95); + user-select: none; + -webkit-user-select: none; + flex-direction: column; +} + +.modalControls { + display: flex; + position: absolute; + right: 0px; + left: 0px; + gap: 1em; + padding: 1em; + background-color:rgba(0,0,0,0); + z-index: 1; + transition: 0.2s ease background-color; +} +.modalControls:hover { + background-color:rgba(0,0,0,0.9); +} +.modalClose { + margin-left: auto; +} +.modalControls span{ + color: white; + text-shadow: 0px 0px 0.25em black; + font-size: 35px; + font-weight: bold; + cursor: pointer; + width: 1em; +} + +.modalControls span:hover, .modalControls span:focus{ + color: #999; + text-decoration: none; +} + +#lightboxModal > img { + display: block; + margin: auto; + width: auto; +} + +#lightboxModal > img.modalImageFullscreen{ + object-fit: contain; + height: 100%; + width: 100%; + min-height: 0; +} + +.modalPrev, +.modalNext { + cursor: pointer; + position: absolute; + top: 50%; + width: auto; + padding: 16px; + margin-top: -50px; + color: white; + font-weight: bold; + font-size: 20px; + transition: 0.6s ease; + border-radius: 0 3px 3px 0; + user-select: none; + -webkit-user-select: none; +} + +.modalNext { + right: 0; + border-radius: 3px 0 0 3px; +} + +.modalPrev:hover, +.modalNext:hover { + background-color: rgba(0, 0, 0, 0.8); +} + +#imageARPreview { + position: absolute; + top: 0px; + left: 0px; + border: 2px solid red; + background: rgba(255, 0, 0, 0.3); + z-index: 900; + pointer-events: none; + display: none; +} + +#stylePreviewOverlay { + opacity: 0; + pointer-events: none; + width: 128px; + height: 128px; + position: fixed; + top: 0px; + left: 0px; + border: solid 1px lightgrey; + transform: translate(-140px, 20px); + background-size: cover; + background-position: center; + background-color: rgba(0, 0, 0, 0.3); + border-radius: 5px; + z-index: 100; + transition: transform 0.1s ease, opacity 0.3s ease; +} + +#stylePreviewOverlay.lower-half { + transform: translate(-140px, -140px); +} diff --git a/entry_with_update.py b/entry_with_update.py new file mode 100644 index 0000000000000000000000000000000000000000..4b66ac2d2822e3d50efb629d81aede4c0c62b1d5 --- /dev/null +++ 
b/entry_with_update.py
@@ -0,0 +1,46 @@
+import os
+import sys
+
+
+root = os.path.dirname(os.path.abspath(__file__))
+sys.path.append(root)
+os.chdir(root)
+
+
+try:
+    import pygit2
+    pygit2.option(pygit2.GIT_OPT_SET_OWNER_VALIDATION, 0)
+
+    repo = pygit2.Repository(os.path.abspath(os.path.dirname(__file__)))
+
+    branch_name = repo.head.shorthand
+
+    remote_name = 'origin'
+    remote = repo.remotes[remote_name]
+
+    remote.fetch()
+
+    local_branch_ref = f'refs/heads/{branch_name}'
+    local_branch = repo.lookup_reference(local_branch_ref)
+
+    remote_reference = f'refs/remotes/{remote_name}/{branch_name}'
+    remote_commit = repo.revparse_single(remote_reference)
+
+    merge_result, _ = repo.merge_analysis(remote_commit.id)
+
+    if merge_result & pygit2.GIT_MERGE_ANALYSIS_UP_TO_DATE:
+        print("Already up-to-date")
+    elif merge_result & pygit2.GIT_MERGE_ANALYSIS_FASTFORWARD:
+        local_branch.set_target(remote_commit.id)
+        repo.head.set_target(remote_commit.id)
+        repo.checkout_tree(repo.get(remote_commit.id))
+        repo.reset(local_branch.target, pygit2.GIT_RESET_HARD)
+        print("Fast-forward merge")
+    elif merge_result & pygit2.GIT_MERGE_ANALYSIS_NORMAL:
+        print("Update failed - Did you modify any file?")
+except Exception as e:
+    print('Update failed.')
+    print(str(e))
+else:
+    # only claim success when the update block above did not raise
+    print('Update succeeded.')
+from launch import *
diff --git a/entrypoint.sh b/entrypoint.sh
new file mode 100644
index 0000000000000000000000000000000000000000..d0dba09c266d42819c3fe789daa3781c00089796
--- /dev/null
+++ b/entrypoint.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+ORIGINALDIR=/content/app
+# Use predefined DATADIR if it is defined
+[[ x"${DATADIR}" == "x" ]] && DATADIR=/content/data
+
+# Make persistent dir from original dir
+function mklink () {
+  mkdir -p $DATADIR/$1
+  ln -s $DATADIR/$1 $ORIGINALDIR
+}
+
+# Copy old files from import dir
+function import () {
+  (test -d /import/$1 && cd /import/$1 && cp -Rpn . $DATADIR/$1/)
+}
+
+cd $ORIGINALDIR
+
+# models
+mklink models
+# Copy original files
+(cd $ORIGINALDIR/models.org && cp -Rpn . 
$ORIGINALDIR/models/) +# Import old files +import models + +# outputs +mklink outputs +# Import old files +import outputs + +# Start application +python launch.py $* diff --git a/environment.yaml b/environment.yaml new file mode 100644 index 0000000000000000000000000000000000000000..55826b707c7c9813c7b7d5fd806340e8348e3683 --- /dev/null +++ b/environment.yaml @@ -0,0 +1,7 @@ +name: fooocus +channels: + - defaults +dependencies: + - python=3.10 + - pip=23.0 + - packaging diff --git a/experiments_expansion.py b/experiments_expansion.py new file mode 100644 index 0000000000000000000000000000000000000000..5a2a946a88326328b57ebe01e16bbeea7d37b300 --- /dev/null +++ b/experiments_expansion.py @@ -0,0 +1,8 @@ +from modules.expansion import FooocusExpansion + +expansion = FooocusExpansion() + +text = 'a handsome man' + +for i in range(64): + print(expansion(text, seed=i)) diff --git a/experiments_face.py b/experiments_face.py new file mode 100644 index 0000000000000000000000000000000000000000..3b4909fa87bd79786cae9116457bd3d6539281f0 --- /dev/null +++ b/experiments_face.py @@ -0,0 +1,7 @@ +import cv2 +import extras.face_crop as cropper + + +img = cv2.imread('lena.png') +result = cropper.crop_image(img) +cv2.imwrite('lena_result.png', result) diff --git a/experiments_interrogate.py b/experiments_interrogate.py new file mode 100644 index 0000000000000000000000000000000000000000..5b61a2ea19dc831a7cd3159564b26e7275d77ef1 --- /dev/null +++ b/experiments_interrogate.py @@ -0,0 +1,205 @@ +import os +import sys + + +root = os.path.dirname(os.path.abspath(__file__)) +sys.path.append(root) +os.chdir(root) + + +try: + import pygit2 + pygit2.option(pygit2.GIT_OPT_SET_OWNER_VALIDATION, 0) + + repo = pygit2.Repository(os.path.abspath(os.path.dirname(__file__))) + + branch_name = repo.head.shorthand + + remote_name = 'origin' + remote = repo.remotes[remote_name] + + remote.fetch() + + local_branch_ref = f'refs/heads/{branch_name}' + local_branch = repo.lookup_reference(local_branch_ref) + + remote_reference = f'refs/remotes/{remote_name}/{branch_name}' + remote_commit = repo.revparse_single(remote_reference) + + merge_result, _ = repo.merge_analysis(remote_commit.id) + + if merge_result & pygit2.GIT_MERGE_ANALYSIS_UP_TO_DATE: + print("Already up-to-date") + elif merge_result & pygit2.GIT_MERGE_ANALYSIS_FASTFORWARD: + local_branch.set_target(remote_commit.id) + repo.head.set_target(remote_commit.id) + repo.checkout_tree(repo.get(remote_commit.id)) + repo.reset(local_branch.target, pygit2.GIT_RESET_HARD) + print("Fast-forward merge") + elif merge_result & pygit2.GIT_MERGE_ANALYSIS_NORMAL: + print("Update failed - Did you modify any file?") +except Exception as e: + print('Update failed.') + print(str(e)) + +import os +import sys +import ssl + +print('[System ARGV] ' + str(sys.argv)) + +root = os.path.dirname(os.path.abspath(__file__)) +sys.path.append(root) +os.chdir(root) + +os.environ["PYTORCH_ENABLE_MPS_FALLBACK"] = "1" +os.environ["PYTORCH_MPS_HIGH_WATERMARK_RATIO"] = "0.0" +if "GRADIO_SERVER_PORT" not in os.environ: + os.environ["GRADIO_SERVER_PORT"] = "7865" + +ssl._create_default_https_context = ssl._create_unverified_context + + +import platform +import fooocus_version + +from build_launcher import build_launcher +from modules.launch_util import is_installed, run, python, run_pip, requirements_met +from modules.model_loader import load_file_from_url + + +REINSTALL_ALL = False +TRY_INSTALL_XFORMERS = False + + +def prepare_environment(): + torch_index_url = os.environ.get('TORCH_INDEX_URL', 
"https://download.pytorch.org/whl/cu121") + torch_command = os.environ.get('TORCH_COMMAND', + f"pip install torch==2.1.0 torchvision==0.16.0 --extra-index-url {torch_index_url}") + requirements_file = os.environ.get('REQS_FILE', "requirements_versions.txt") + + print(f"Python {sys.version}") + print(f"Fooocus version: {fooocus_version.version}") + + if REINSTALL_ALL or not is_installed("torch") or not is_installed("torchvision"): + run(f'"{python}" -m {torch_command}', "Installing torch and torchvision", "Couldn't install torch", live=True) + + if TRY_INSTALL_XFORMERS: + if REINSTALL_ALL or not is_installed("xformers"): + xformers_package = os.environ.get('XFORMERS_PACKAGE', 'xformers==0.0.23') + if platform.system() == "Windows": + if platform.python_version().startswith("3.10"): + run_pip(f"install -U -I --no-deps {xformers_package}", "xformers", live=True) + else: + print("Installation of xformers is not supported in this version of Python.") + print( + "You can also check this and build manually: https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Xformers#building-xformers-on-windows-by-duckness") + if not is_installed("xformers"): + exit(0) + elif platform.system() == "Linux": + run_pip(f"install -U -I --no-deps {xformers_package}", "xformers") + + if REINSTALL_ALL or not requirements_met(requirements_file): + run_pip(f"install -r \"{requirements_file}\"", "requirements") + + return + + +vae_approx_filenames = [ + ('xlvaeapp.pth', 'https://huggingface.co/lllyasviel/misc/resolve/main/xlvaeapp.pth'), + ('vaeapp_sd15.pth', 'https://huggingface.co/lllyasviel/misc/resolve/main/vaeapp_sd15.pt'), + ('xl-to-v1_interposer-v3.1.safetensors', + 'https://huggingface.co/lllyasviel/misc/resolve/main/xl-to-v1_interposer-v3.1.safetensors') +] + +def ini_args(): + from args_manager import args + return args + + +prepare_environment() +build_launcher() +args = ini_args() + + +if args.gpu_device_id is not None: + os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu_device_id) + print("Set device to:", args.gpu_device_id) + + +from modules import config + +def download_models(): + for file_name, url in vae_approx_filenames: + load_file_from_url(url=url, model_dir=config.path_vae_approx, file_name=file_name) + + load_file_from_url( + url='https://huggingface.co/lllyasviel/misc/resolve/main/fooocus_expansion.bin', + model_dir=config.path_fooocus_expansion, + file_name='pytorch_model.bin' + ) + + if args.disable_preset_download: + print('Skipped model download.') + return + + if not args.always_download_new_model: + if not os.path.exists(os.path.join(config.paths_checkpoints[0], config.default_base_model_name)): + for alternative_model_name in config.previous_default_models: + if os.path.exists(os.path.join(config.paths_checkpoints[0], alternative_model_name)): + print(f'You do not have [{config.default_base_model_name}] but you have [{alternative_model_name}].') + print(f'Fooocus will use [{alternative_model_name}] to avoid downloading new models, ' + f'but you are not using latest models.') + print('Use --always-download-new-model to avoid fallback and always get new models.') + config.checkpoint_downloads = {} + config.default_base_model_name = alternative_model_name + break + + for file_name, url in config.checkpoint_downloads.items(): + load_file_from_url(url=url, model_dir=config.paths_checkpoints[0], file_name=file_name) + for file_name, url in config.embeddings_downloads.items(): + load_file_from_url(url=url, model_dir=config.path_embeddings, file_name=file_name) + for file_name, url in 
diff --git a/extras/BLIP/configs/bert_config.json b/extras/BLIP/configs/bert_config.json new file mode 100644 index 0000000000000000000000000000000000000000..3ef38aabc7f966b53079e9d559dc59e459cc0051 --- /dev/null +++ b/extras/BLIP/configs/bert_config.json @@ -0,0 +1,21 @@ +{ + "architectures": [ + "BertModel" + ], + "attention_probs_dropout_prob": 0.1, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-12, + "max_position_embeddings": 512, + "model_type": "bert", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "pad_token_id": 0, + "type_vocab_size": 2, + "vocab_size": 30522, + "encoder_width": 768, + "add_cross_attention": true +}
diff --git a/extras/BLIP/configs/caption_coco.yaml b/extras/BLIP/configs/caption_coco.yaml new file mode 100644 index 0000000000000000000000000000000000000000..42eab7030c0310ba2f265baf36fa1400aa6e5846 --- /dev/null +++ b/extras/BLIP/configs/caption_coco.yaml @@ -0,0 +1,33 @@ +image_root: '/export/share/datasets/vision/coco/images/' +ann_root: 'annotation' +coco_gt_root: 'annotation/coco_gt' + +# set pretrained as a file path or an url +pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_caption_capfilt_large.pth' + +# size of vit model; base or large +vit: 'base' +vit_grad_ckpt: False +vit_ckpt_layer: 0 +batch_size: 32 +init_lr: 1e-5 + +# vit: 'large' +# vit_grad_ckpt: True +# vit_ckpt_layer: 5 +# batch_size: 16 +# init_lr: 2e-6 + +image_size: 384 + +# generation configs +max_length: 20 +min_length: 5 +num_beams: 3 +prompt: 'a picture of ' + +# optimizer +weight_decay: 0.05 +min_lr: 0 +max_epoch: 5 +
diff --git a/extras/BLIP/configs/med_config.json b/extras/BLIP/configs/med_config.json new file mode 100644 index 0000000000000000000000000000000000000000..0ffad0a6f3c2f9f11b8faa84529d9860bb70327a --- /dev/null +++ b/extras/BLIP/configs/med_config.json @@ -0,0 +1,21 @@ +{ + "architectures": [ + "BertModel" + ], + "attention_probs_dropout_prob": 0.1, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-12, + "max_position_embeddings": 512, + "model_type": "bert", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "pad_token_id": 0, + "type_vocab_size": 2, + "vocab_size": 30524, + "encoder_width": 768, + "add_cross_attention": true +}
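These are vendored BLIP configs. med_config.json is the stock bert-base layout plus the cross-attention fields BLIP's text encoder needs (encoder_width, add_cross_attention), with vocab_size enlarged from 30522 to 30524, presumably to make room for BLIP's extra special tokens. A quick sanity check (a sketch; Hugging Face transformers is assumed available, and the path is assumed relative to the repo root):

from transformers import BertConfig

cfg = BertConfig.from_json_file('extras/BLIP/configs/med_config.json')
print(cfg.vocab_size, cfg.add_cross_attention)  # expected: 30524 True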
"hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-12, + "max_position_embeddings": 512, + "model_type": "bert", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "pad_token_id": 0, + "type_vocab_size": 2, + "vocab_size": 30524, + "encoder_width": 768, + "add_cross_attention": true +} diff --git a/extras/BLIP/configs/nlvr.yaml b/extras/BLIP/configs/nlvr.yaml new file mode 100644 index 0000000000000000000000000000000000000000..2d1122aadb1a776bd347068233096b0c984f648b --- /dev/null +++ b/extras/BLIP/configs/nlvr.yaml @@ -0,0 +1,21 @@ +image_root: '/export/share/datasets/vision/NLVR2/' +ann_root: 'annotation' + +# set pretrained as a file path or an url +pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_nlvr.pth' + +#size of vit model; base or large +vit: 'base' +batch_size_train: 16 +batch_size_test: 64 +vit_grad_ckpt: False +vit_ckpt_layer: 0 +max_epoch: 15 + +image_size: 384 + +# optimizer +weight_decay: 0.05 +init_lr: 3e-5 +min_lr: 0 + diff --git a/extras/BLIP/configs/nocaps.yaml b/extras/BLIP/configs/nocaps.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9028135859b94aef5324c85c80e376c609d8a089 --- /dev/null +++ b/extras/BLIP/configs/nocaps.yaml @@ -0,0 +1,15 @@ +image_root: '/export/share/datasets/vision/nocaps/' +ann_root: 'annotation' + +# set pretrained as a file path or an url +pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_caption_capfilt_large.pth' + +vit: 'base' +batch_size: 32 + +image_size: 384 + +max_length: 20 +min_length: 5 +num_beams: 3 +prompt: 'a picture of ' \ No newline at end of file diff --git a/extras/BLIP/configs/pretrain.yaml b/extras/BLIP/configs/pretrain.yaml new file mode 100644 index 0000000000000000000000000000000000000000..02355ee0228932803c661616485bf315e862b826 --- /dev/null +++ b/extras/BLIP/configs/pretrain.yaml @@ -0,0 +1,27 @@ +train_file: ['/export/share/junnan-li/VL_pretrain/annotation/coco_karpathy_train.json', + '/export/share/junnan-li/VL_pretrain/annotation/vg_caption.json', + ] +laion_path: '' + +# size of vit model; base or large +vit: 'base' +vit_grad_ckpt: False +vit_ckpt_layer: 0 + +image_size: 224 +batch_size: 75 + +queue_size: 57600 +alpha: 0.4 + +# optimizer +weight_decay: 0.05 +init_lr: 3e-4 +min_lr: 1e-6 +warmup_lr: 1e-6 +lr_decay_rate: 0.9 +max_epoch: 20 +warmup_steps: 3000 + + + diff --git a/extras/BLIP/configs/retrieval_coco.yaml b/extras/BLIP/configs/retrieval_coco.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a8569e9b67112fe3605ac25e4fdc0231f7975378 --- /dev/null +++ b/extras/BLIP/configs/retrieval_coco.yaml @@ -0,0 +1,34 @@ +image_root: '/export/share/datasets/vision/coco/images/' +ann_root: 'annotation' +dataset: 'coco' + +# set pretrained as a file path or an url +pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_retrieval_coco.pth' + +# size of vit model; base or large + +vit: 'base' +batch_size_train: 32 +batch_size_test: 64 +vit_grad_ckpt: True +vit_ckpt_layer: 4 +init_lr: 1e-5 + +# vit: 'large' +# batch_size_train: 16 +# batch_size_test: 32 +# vit_grad_ckpt: True +# vit_ckpt_layer: 12 +# init_lr: 5e-6 + +image_size: 384 +queue_size: 57600 +alpha: 0.4 +k_test: 256 +negative_all_rank: True + +# optimizer +weight_decay: 0.05 +min_lr: 0 +max_epoch: 6 + diff --git a/extras/BLIP/configs/retrieval_flickr.yaml b/extras/BLIP/configs/retrieval_flickr.yaml 
diff --git a/extras/BLIP/configs/retrieval_msrvtt.yaml b/extras/BLIP/configs/retrieval_msrvtt.yaml new file mode 100644 index 0000000000000000000000000000000000000000..395f62542bb22d706b8e19e2455d2c7298984d0b --- /dev/null +++ b/extras/BLIP/configs/retrieval_msrvtt.yaml @@ -0,0 +1,12 @@ +video_root: '/export/share/dongxuli/data/msrvtt_retrieval/videos' +ann_root: 'annotation' + +# set pretrained as a file path or an url +pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_retrieval_coco.pth' + +# size of vit model; base or large +vit: 'base' +batch_size: 64 +k_test: 128 +image_size: 384 +num_frm_test: 8 \ No newline at end of file
diff --git a/extras/BLIP/configs/vqa.yaml b/extras/BLIP/configs/vqa.yaml new file mode 100644 index 0000000000000000000000000000000000000000..74327e6d0a34672023b44569558fe8beeb052548 --- /dev/null +++ b/extras/BLIP/configs/vqa.yaml @@ -0,0 +1,25 @@ +vqa_root: '/export/share/datasets/vision/VQA/Images/mscoco/' #followed by train2014/ +vg_root: '/export/share/datasets/vision/visual-genome/' #followed by image/ +train_files: ['vqa_train','vqa_val','vg_qa'] +ann_root: 'annotation' + +# set pretrained as a file path or an url +pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_vqa_capfilt_large.pth' + +# size of vit model; base or large +vit: 'base' +batch_size_train: 16 +batch_size_test: 32 +vit_grad_ckpt: False +vit_ckpt_layer: 0 +init_lr: 2e-5 + +image_size: 480 + +k_test: 128 +inference: 'rank' + +# optimizer +weight_decay: 0.05 +min_lr: 0 +max_epoch: 10 \ No newline at end of file
diff --git a/extras/BLIP/models/__pycache__/blip.cpython-310.pyc b/extras/BLIP/models/__pycache__/blip.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fe42dfe97835bb757be0599e4692cbbea5d63294 Binary files /dev/null and b/extras/BLIP/models/__pycache__/blip.cpython-310.pyc differ
diff --git a/extras/BLIP/models/__pycache__/med.cpython-310.pyc b/extras/BLIP/models/__pycache__/med.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f3c9bb1c9d15a399407638fd18bd2aecfa17172f Binary files /dev/null and b/extras/BLIP/models/__pycache__/med.cpython-310.pyc differ
diff --git a/extras/BLIP/models/__pycache__/vit.cpython-310.pyc b/extras/BLIP/models/__pycache__/vit.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..46dfee0a9ed158de170a9ca2485a9d460457a434 Binary files /dev/null and b/extras/BLIP/models/__pycache__/vit.cpython-310.pyc differ
diff --git a/extras/BLIP/models/bert_tokenizer/config.json b/extras/BLIP/models/bert_tokenizer/config.json
new file mode 100644 index 0000000000000000000000000000000000000000..45a2321a7ecfdaaf60a6c1fd7f5463994cc8907d --- /dev/null +++ b/extras/BLIP/models/bert_tokenizer/config.json @@ -0,0 +1,23 @@ +{ + "architectures": [ + "BertForMaskedLM" + ], + "attention_probs_dropout_prob": 0.1, + "gradient_checkpointing": false, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-12, + "max_position_embeddings": 512, + "model_type": "bert", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "pad_token_id": 0, + "position_embedding_type": "absolute", + "transformers_version": "4.6.0.dev0", + "type_vocab_size": 2, + "use_cache": true, + "vocab_size": 30522 +} diff --git a/extras/BLIP/models/bert_tokenizer/tokenizer.json b/extras/BLIP/models/bert_tokenizer/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..949a6f013d67eb8a5b4b5b46026217b888021b88 --- /dev/null +++ b/extras/BLIP/models/bert_tokenizer/tokenizer.json @@ -0,0 +1 @@ +{"version":"1.0","truncation":null,"padding":null,"added_tokens":[{"id":0,"special":true,"content":"[PAD]","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":100,"special":true,"content":"[UNK]","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":101,"special":true,"content":"[CLS]","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":102,"special":true,"content":"[SEP]","single_word":false,"lstrip":false,"rstrip":false,"normalized":false},{"id":103,"special":true,"content":"[MASK]","single_word":false,"lstrip":false,"rstrip":false,"normalized":false}],"normalizer":{"type":"BertNormalizer","clean_text":true,"handle_chinese_chars":true,"strip_accents":null,"lowercase":true},"pre_tokenizer":{"type":"BertPreTokenizer"},"post_processor":{"type":"TemplateProcessing","single":[{"SpecialToken":{"id":"[CLS]","type_id":0}},{"Sequence":{"id":"A","type_id":0}},{"SpecialToken":{"id":"[SEP]","type_id":0}}],"pair":[{"SpecialToken":{"id":"[CLS]","type_id":0}},{"Sequence":{"id":"A","type_id":0}},{"SpecialToken":{"id":"[SEP]","type_id":0}},{"Sequence":{"id":"B","type_id":1}},{"SpecialToken":{"id":"[SEP]","type_id":1}}],"special_tokens":{"[CLS]":{"id":"[CLS]","ids":[101],"tokens":["[CLS]"]},"[SEP]":{"id":"[SEP]","ids":[102],"tokens":["[SEP]"]}}},"decoder":{"type":"WordPiece","prefix":"##","cleanup":true},"model":{"unk_token":"[UNK]","continuing_subword_prefix":"##","max_input_chars_per_word":100,"vocab":{"[PAD]":0,"[unused0]":1,"[unused1]":2,"[unused2]":3,"[unused3]":4,"[unused4]":5,"[unused5]":6,"[unused6]":7,"[unused7]":8,"[unused8]":9,"[unused9]":10,"[unused10]":11,"[unused11]":12,"[unused12]":13,"[unused13]":14,"[unused14]":15,"[unused15]":16,"[unused16]":17,"[unused17]":18,"[unused18]":19,"[unused19]":20,"[unused20]":21,"[unused21]":22,"[unused22]":23,"[unused23]":24,"[unused24]":25,"[unused25]":26,"[unused26]":27,"[unused27]":28,"[unused28]":29,"[unused29]":30,"[unused30]":31,"[unused31]":32,"[unused32]":33,"[unused33]":34,"[unused34]":35,"[unused35]":36,"[unused36]":37,"[unused37]":38,"[unused38]":39,"[unused39]":40,"[unused40]":41,"[unused41]":42,"[unused42]":43,"[unused43]":44,"[unused44]":45,"[unused45]":46,"[unused46]":47,"[unused47]":48,"[unused48]":49,"[unused49]":50,"[unused50]":51,"[unused51]":52,"[unused52]":53,"[unused53]":54,"[unused54]":55,"[unused55]":56,"[unused56]":57,"[unused57]":58,"[unused58]":59,"[unused59]":60,"[unused60]":61,"[unused61]":62,"[unused62]":63,"[un
used63]":64,"[unused64]":65,"[unused65]":66,"[unused66]":67,"[unused67]":68,"[unused68]":69,"[unused69]":70,"[unused70]":71,"[unused71]":72,"[unused72]":73,"[unused73]":74,"[unused74]":75,"[unused75]":76,"[unused76]":77,"[unused77]":78,"[unused78]":79,"[unused79]":80,"[unused80]":81,"[unused81]":82,"[unused82]":83,"[unused83]":84,"[unused84]":85,"[unused85]":86,"[unused86]":87,"[unused87]":88,"[unused88]":89,"[unused89]":90,"[unused90]":91,"[unused91]":92,"[unused92]":93,"[unused93]":94,"[unused94]":95,"[unused95]":96,"[unused96]":97,"[unused97]":98,"[unused98]":99,"[UNK]":100,"[CLS]":101,"[SEP]":102,"[MASK]":103,"[unused99]":104,"[unused100]":105,"[unused101]":106,"[unused102]":107,"[unused103]":108,"[unused104]":109,"[unused105]":110,"[unused106]":111,"[unused107]":112,"[unused108]":113,"[unused109]":114,"[unused110]":115,"[unused111]":116,"[unused112]":117,"[unused113]":118,"[unused114]":119,"[unused115]":120,"[unused116]":121,"[unused117]":122,"[unused118]":123,"[unused119]":124,"[unused120]":125,"[unused121]":126,"[unused122]":127,"[unused123]":128,"[unused124]":129,"[unused125]":130,"[unused126]":131,"[unused127]":132,"[unused128]":133,"[unused129]":134,"[unused130]":135,"[unused131]":136,"[unused132]":137,"[unused133]":138,"[unused134]":139,"[unused135]":140,"[unused136]":141,"[unused137]":142,"[unused138]":143,"[unused139]":144,"[unused140]":145,"[unused141]":146,"[unused142]":147,"[unused143]":148,"[unused144]":149,"[unused145]":150,"[unused146]":151,"[unused147]":152,"[unused148]":153,"[unused149]":154,"[unused150]":155,"[unused151]":156,"[unused152]":157,"[unused153]":158,"[unused154]":159,"[unused155]":160,"[unused156]":161,"[unused157]":162,"[unused158]":163,"[unused159]":164,"[unused160]":165,"[unused161]":166,"[unused162]":167,"[unused163]":168,"[unused164]":169,"[unused165]":170,"[unused166]":171,"[unused167]":172,"[unused168]":173,"[unused169]":174,"[unused170]":175,"[unused171]":176,"[unused172]":177,"[unused173]":178,"[unused174]":179,"[unused175]":180,"[unused176]":181,"[unused177]":182,"[unused178]":183,"[unused179]":184,"[unused180]":185,"[unused181]":186,"[unused182]":187,"[unused183]":188,"[unused184]":189,"[unused185]":190,"[unused186]":191,"[unused187]":192,"[unused188]":193,"[unused189]":194,"[unused190]":195,"[unused191]":196,"[unused192]":197,"[unused193]":198,"[unused194]":199,"[unused195]":200,"[unused196]":201,"[unused197]":202,"[unused198]":203,"[unused199]":204,"[unused200]":205,"[unused201]":206,"[unused202]":207,"[unused203]":208,"[unused204]":209,"[unused205]":210,"[unused206]":211,"[unused207]":212,"[unused208]":213,"[unused209]":214,"[unused210]":215,"[unused211]":216,"[unused212]":217,"[unused213]":218,"[unused214]":219,"[unused215]":220,"[unused216]":221,"[unused217]":222,"[unused218]":223,"[unused219]":224,"[unused220]":225,"[unused221]":226,"[unused222]":227,"[unused223]":228,"[unused224]":229,"[unused225]":230,"[unused226]":231,"[unused227]":232,"[unused228]":233,"[unused229]":234,"[unused230]":235,"[unused231]":236,"[unused232]":237,"[unused233]":238,"[unused234]":239,"[unused235]":240,"[unused236]":241,"[unused237]":242,"[unused238]":243,"[unused239]":244,"[unused240]":245,"[unused241]":246,"[unused242]":247,"[unused243]":248,"[unused244]":249,"[unused245]":250,"[unused246]":251,"[unused247]":252,"[unused248]":253,"[unused249]":254,"[unused250]":255,"[unused251]":256,"[unused252]":257,"[unused253]":258,"[unused254]":259,"[unused255]":260,"[unused256]":261,"[unused257]":262,"[unused258]":263,"[unused259]":264,"[unused260]":265,"[unused261]":266,"
[unused262]":267,"[unused263]":268,"[unused264]":269,"[unused265]":270,"[unused266]":271,"[unused267]":272,"[unused268]":273,"[unused269]":274,"[unused270]":275,"[unused271]":276,"[unused272]":277,"[unused273]":278,"[unused274]":279,"[unused275]":280,"[unused276]":281,"[unused277]":282,"[unused278]":283,"[unused279]":284,"[unused280]":285,"[unused281]":286,"[unused282]":287,"[unused283]":288,"[unused284]":289,"[unused285]":290,"[unused286]":291,"[unused287]":292,"[unused288]":293,"[unused289]":294,"[unused290]":295,"[unused291]":296,"[unused292]":297,"[unused293]":298,"[unused294]":299,"[unused295]":300,"[unused296]":301,"[unused297]":302,"[unused298]":303,"[unused299]":304,"[unused300]":305,"[unused301]":306,"[unused302]":307,"[unused303]":308,"[unused304]":309,"[unused305]":310,"[unused306]":311,"[unused307]":312,"[unused308]":313,"[unused309]":314,"[unused310]":315,"[unused311]":316,"[unused312]":317,"[unused313]":318,"[unused314]":319,"[unused315]":320,"[unused316]":321,"[unused317]":322,"[unused318]":323,"[unused319]":324,"[unused320]":325,"[unused321]":326,"[unused322]":327,"[unused323]":328,"[unused324]":329,"[unused325]":330,"[unused326]":331,"[unused327]":332,"[unused328]":333,"[unused329]":334,"[unused330]":335,"[unused331]":336,"[unused332]":337,"[unused333]":338,"[unused334]":339,"[unused335]":340,"[unused336]":341,"[unused337]":342,"[unused338]":343,"[unused339]":344,"[unused340]":345,"[unused341]":346,"[unused342]":347,"[unused343]":348,"[unused344]":349,"[unused345]":350,"[unused346]":351,"[unused347]":352,"[unused348]":353,"[unused349]":354,"[unused350]":355,"[unused351]":356,"[unused352]":357,"[unused353]":358,"[unused354]":359,"[unused355]":360,"[unused356]":361,"[unused357]":362,"[unused358]":363,"[unused359]":364,"[unused360]":365,"[unused361]":366,"[unused362]":367,"[unused363]":368,"[unused364]":369,"[unused365]":370,"[unused366]":371,"[unused367]":372,"[unused368]":373,"[unused369]":374,"[unused370]":375,"[unused371]":376,"[unused372]":377,"[unused373]":378,"[unused374]":379,"[unused375]":380,"[unused376]":381,"[unused377]":382,"[unused378]":383,"[unused379]":384,"[unused380]":385,"[unused381]":386,"[unused382]":387,"[unused383]":388,"[unused384]":389,"[unused385]":390,"[unused386]":391,"[unused387]":392,"[unused388]":393,"[unused389]":394,"[unused390]":395,"[unused391]":396,"[unused392]":397,"[unused393]":398,"[unused394]":399,"[unused395]":400,"[unused396]":401,"[unused397]":402,"[unused398]":403,"[unused399]":404,"[unused400]":405,"[unused401]":406,"[unused402]":407,"[unused403]":408,"[unused404]":409,"[unused405]":410,"[unused406]":411,"[unused407]":412,"[unused408]":413,"[unused409]":414,"[unused410]":415,"[unused411]":416,"[unused412]":417,"[unused413]":418,"[unused414]":419,"[unused415]":420,"[unused416]":421,"[unused417]":422,"[unused418]":423,"[unused419]":424,"[unused420]":425,"[unused421]":426,"[unused422]":427,"[unused423]":428,"[unused424]":429,"[unused425]":430,"[unused426]":431,"[unused427]":432,"[unused428]":433,"[unused429]":434,"[unused430]":435,"[unused431]":436,"[unused432]":437,"[unused433]":438,"[unused434]":439,"[unused435]":440,"[unused436]":441,"[unused437]":442,"[unused438]":443,"[unused439]":444,"[unused440]":445,"[unused441]":446,"[unused442]":447,"[unused443]":448,"[unused444]":449,"[unused445]":450,"[unused446]":451,"[unused447]":452,"[unused448]":453,"[unused449]":454,"[unused450]":455,"[unused451]":456,"[unused452]":457,"[unused453]":458,"[unused454]":459,"[unused455]":460,"[unused456]":461,"[unused457]":462,"[unused458]":463,"[unused45
9]":464,"[unused460]":465,"[unused461]":466,"[unused462]":467,"[unused463]":468,"[unused464]":469,"[unused465]":470,"[unused466]":471,"[unused467]":472,"[unused468]":473,"[unused469]":474,"[unused470]":475,"[unused471]":476,"[unused472]":477,"[unused473]":478,"[unused474]":479,"[unused475]":480,"[unused476]":481,"[unused477]":482,"[unused478]":483,"[unused479]":484,"[unused480]":485,"[unused481]":486,"[unused482]":487,"[unused483]":488,"[unused484]":489,"[unused485]":490,"[unused486]":491,"[unused487]":492,"[unused488]":493,"[unused489]":494,"[unused490]":495,"[unused491]":496,"[unused492]":497,"[unused493]":498,"[unused494]":499,"[unused495]":500,"[unused496]":501,"[unused497]":502,"[unused498]":503,"[unused499]":504,"[unused500]":505,"[unused501]":506,"[unused502]":507,"[unused503]":508,"[unused504]":509,"[unused505]":510,"[unused506]":511,"[unused507]":512,"[unused508]":513,"[unused509]":514,"[unused510]":515,"[unused511]":516,"[unused512]":517,"[unused513]":518,"[unused514]":519,"[unused515]":520,"[unused516]":521,"[unused517]":522,"[unused518]":523,"[unused519]":524,"[unused520]":525,"[unused521]":526,"[unused522]":527,"[unused523]":528,"[unused524]":529,"[unused525]":530,"[unused526]":531,"[unused527]":532,"[unused528]":533,"[unused529]":534,"[unused530]":535,"[unused531]":536,"[unused532]":537,"[unused533]":538,"[unused534]":539,"[unused535]":540,"[unused536]":541,"[unused537]":542,"[unused538]":543,"[unused539]":544,"[unused540]":545,"[unused541]":546,"[unused542]":547,"[unused543]":548,"[unused544]":549,"[unused545]":550,"[unused546]":551,"[unused547]":552,"[unused548]":553,"[unused549]":554,"[unused550]":555,"[unused551]":556,"[unused552]":557,"[unused553]":558,"[unused554]":559,"[unused555]":560,"[unused556]":561,"[unused557]":562,"[unused558]":563,"[unused559]":564,"[unused560]":565,"[unused561]":566,"[unused562]":567,"[unused563]":568,"[unused564]":569,"[unused565]":570,"[unused566]":571,"[unused567]":572,"[unused568]":573,"[unused569]":574,"[unused570]":575,"[unused571]":576,"[unused572]":577,"[unused573]":578,"[unused574]":579,"[unused575]":580,"[unused576]":581,"[unused577]":582,"[unused578]":583,"[unused579]":584,"[unused580]":585,"[unused581]":586,"[unused582]":587,"[unused583]":588,"[unused584]":589,"[unused585]":590,"[unused586]":591,"[unused587]":592,"[unused588]":593,"[unused589]":594,"[unused590]":595,"[unused591]":596,"[unused592]":597,"[unused593]":598,"[unused594]":599,"[unused595]":600,"[unused596]":601,"[unused597]":602,"[unused598]":603,"[unused599]":604,"[unused600]":605,"[unused601]":606,"[unused602]":607,"[unused603]":608,"[unused604]":609,"[unused605]":610,"[unused606]":611,"[unused607]":612,"[unused608]":613,"[unused609]":614,"[unused610]":615,"[unused611]":616,"[unused612]":617,"[unused613]":618,"[unused614]":619,"[unused615]":620,"[unused616]":621,"[unused617]":622,"[unused618]":623,"[unused619]":624,"[unused620]":625,"[unused621]":626,"[unused622]":627,"[unused623]":628,"[unused624]":629,"[unused625]":630,"[unused626]":631,"[unused627]":632,"[unused628]":633,"[unused629]":634,"[unused630]":635,"[unused631]":636,"[unused632]":637,"[unused633]":638,"[unused634]":639,"[unused635]":640,"[unused636]":641,"[unused637]":642,"[unused638]":643,"[unused639]":644,"[unused640]":645,"[unused641]":646,"[unused642]":647,"[unused643]":648,"[unused644]":649,"[unused645]":650,"[unused646]":651,"[unused647]":652,"[unused648]":653,"[unused649]":654,"[unused650]":655,"[unused651]":656,"[unused652]":657,"[unused653]":658,"[unused654]":659,"[unused655]":660,"[unused656]":661,"
[unused657]":662,"[unused658]":663,"[unused659]":664,"[unused660]":665,"[unused661]":666,"[unused662]":667,"[unused663]":668,"[unused664]":669,"[unused665]":670,"[unused666]":671,"[unused667]":672,"[unused668]":673,"[unused669]":674,"[unused670]":675,"[unused671]":676,"[unused672]":677,"[unused673]":678,"[unused674]":679,"[unused675]":680,"[unused676]":681,"[unused677]":682,"[unused678]":683,"[unused679]":684,"[unused680]":685,"[unused681]":686,"[unused682]":687,"[unused683]":688,"[unused684]":689,"[unused685]":690,"[unused686]":691,"[unused687]":692,"[unused688]":693,"[unused689]":694,"[unused690]":695,"[unused691]":696,"[unused692]":697,"[unused693]":698,"[unused694]":699,"[unused695]":700,"[unused696]":701,"[unused697]":702,"[unused698]":703,"[unused699]":704,"[unused700]":705,"[unused701]":706,"[unused702]":707,"[unused703]":708,"[unused704]":709,"[unused705]":710,"[unused706]":711,"[unused707]":712,"[unused708]":713,"[unused709]":714,"[unused710]":715,"[unused711]":716,"[unused712]":717,"[unused713]":718,"[unused714]":719,"[unused715]":720,"[unused716]":721,"[unused717]":722,"[unused718]":723,"[unused719]":724,"[unused720]":725,"[unused721]":726,"[unused722]":727,"[unused723]":728,"[unused724]":729,"[unused725]":730,"[unused726]":731,"[unused727]":732,"[unused728]":733,"[unused729]":734,"[unused730]":735,"[unused731]":736,"[unused732]":737,"[unused733]":738,"[unused734]":739,"[unused735]":740,"[unused736]":741,"[unused737]":742,"[unused738]":743,"[unused739]":744,"[unused740]":745,"[unused741]":746,"[unused742]":747,"[unused743]":748,"[unused744]":749,"[unused745]":750,"[unused746]":751,"[unused747]":752,"[unused748]":753,"[unused749]":754,"[unused750]":755,"[unused751]":756,"[unused752]":757,"[unused753]":758,"[unused754]":759,"[unused755]":760,"[unused756]":761,"[unused757]":762,"[unused758]":763,"[unused759]":764,"[unused760]":765,"[unused761]":766,"[unused762]":767,"[unused763]":768,"[unused764]":769,"[unused765]":770,"[unused766]":771,"[unused767]":772,"[unused768]":773,"[unused769]":774,"[unused770]":775,"[unused771]":776,"[unused772]":777,"[unused773]":778,"[unused774]":779,"[unused775]":780,"[unused776]":781,"[unused777]":782,"[unused778]":783,"[unused779]":784,"[unused780]":785,"[unused781]":786,"[unused782]":787,"[unused783]":788,"[unused784]":789,"[unused785]":790,"[unused786]":791,"[unused787]":792,"[unused788]":793,"[unused789]":794,"[unused790]":795,"[unused791]":796,"[unused792]":797,"[unused793]":798,"[unused794]":799,"[unused795]":800,"[unused796]":801,"[unused797]":802,"[unused798]":803,"[unused799]":804,"[unused800]":805,"[unused801]":806,"[unused802]":807,"[unused803]":808,"[unused804]":809,"[unused805]":810,"[unused806]":811,"[unused807]":812,"[unused808]":813,"[unused809]":814,"[unused810]":815,"[unused811]":816,"[unused812]":817,"[unused813]":818,"[unused814]":819,"[unused815]":820,"[unused816]":821,"[unused817]":822,"[unused818]":823,"[unused819]":824,"[unused820]":825,"[unused821]":826,"[unused822]":827,"[unused823]":828,"[unused824]":829,"[unused825]":830,"[unused826]":831,"[unused827]":832,"[unused828]":833,"[unused829]":834,"[unused830]":835,"[unused831]":836,"[unused832]":837,"[unused833]":838,"[unused834]":839,"[unused835]":840,"[unused836]":841,"[unused837]":842,"[unused838]":843,"[unused839]":844,"[unused840]":845,"[unused841]":846,"[unused842]":847,"[unused843]":848,"[unused844]":849,"[unused845]":850,"[unused846]":851,"[unused847]":852,"[unused848]":853,"[unused849]":854,"[unused850]":855,"[unused851]":856,"[unused852]":857,"[unused853]":858,"[unused85
4]":859,"[unused855]":860,"[unused856]":861,"[unused857]":862,"[unused858]":863,"[unused859]":864,"[unused860]":865,"[unused861]":866,"[unused862]":867,"[unused863]":868,"[unused864]":869,"[unused865]":870,"[unused866]":871,"[unused867]":872,"[unused868]":873,"[unused869]":874,"[unused870]":875,"[unused871]":876,"[unused872]":877,"[unused873]":878,"[unused874]":879,"[unused875]":880,"[unused876]":881,"[unused877]":882,"[unused878]":883,"[unused879]":884,"[unused880]":885,"[unused881]":886,"[unused882]":887,"[unused883]":888,"[unused884]":889,"[unused885]":890,"[unused886]":891,"[unused887]":892,"[unused888]":893,"[unused889]":894,"[unused890]":895,"[unused891]":896,"[unused892]":897,"[unused893]":898,"[unused894]":899,"[unused895]":900,"[unused896]":901,"[unused897]":902,"[unused898]":903,"[unused899]":904,"[unused900]":905,"[unused901]":906,"[unused902]":907,"[unused903]":908,"[unused904]":909,"[unused905]":910,"[unused906]":911,"[unused907]":912,"[unused908]":913,"[unused909]":914,"[unused910]":915,"[unused911]":916,"[unused912]":917,"[unused913]":918,"[unused914]":919,"[unused915]":920,"[unused916]":921,"[unused917]":922,"[unused918]":923,"[unused919]":924,"[unused920]":925,"[unused921]":926,"[unused922]":927,"[unused923]":928,"[unused924]":929,"[unused925]":930,"[unused926]":931,"[unused927]":932,"[unused928]":933,"[unused929]":934,"[unused930]":935,"[unused931]":936,"[unused932]":937,"[unused933]":938,"[unused934]":939,"[unused935]":940,"[unused936]":941,"[unused937]":942,"[unused938]":943,"[unused939]":944,"[unused940]":945,"[unused941]":946,"[unused942]":947,"[unused943]":948,"[unused944]":949,"[unused945]":950,"[unused946]":951,"[unused947]":952,"[unused948]":953,"[unused949]":954,"[unused950]":955,"[unused951]":956,"[unused952]":957,"[unused953]":958,"[unused954]":959,"[unused955]":960,"[unused956]":961,"[unused957]":962,"[unused958]":963,"[unused959]":964,"[unused960]":965,"[unused961]":966,"[unused962]":967,"[unused963]":968,"[unused964]":969,"[unused965]":970,"[unused966]":971,"[unused967]":972,"[unused968]":973,"[unused969]":974,"[unused970]":975,"[unused971]":976,"[unused972]":977,"[unused973]":978,"[unused974]":979,"[unused975]":980,"[unused976]":981,"[unused977]":982,"[unused978]":983,"[unused979]":984,"[unused980]":985,"[unused981]":986,"[unused982]":987,"[unused983]":988,"[unused984]":989,"[unused985]":990,"[unused986]":991,"[unused987]":992,"[unused988]":993,"[unused989]":994,"[unused990]":995,"[unused991]":996,"[unused992]":997,"[unused993]":998,"!":999,"\"":1000,"#":1001,"$":1002,"%":1003,"&":1004,"'":1005,"(":1006,")":1007,"*":1008,"+":1009,",":1010,"-":1011,".":1012,"/":1013,"0":1014,"1":1015,"2":1016,"3":1017,"4":1018,"5":1019,"6":1020,"7":1021,"8":1022,"9":1023,":":1024,";":1025,"<":1026,"=":1027,">":1028,"?":1029,"@":1030,"[":1031,"\\":1032,"]":1033,"^":1034,"_":1035,"`":1036,"a":1037,"b":1038,"c":1039,"d":1040,"e":1041,"f":1042,"g":1043,"h":1044,"i":1045,"j":1046,"k":1047,"l":1048,"m":1049,"n":1050,"o":1051,"p":1052,"q":1053,"r":1054,"s":1055,"t":1056,"u":1057,"v":1058,"w":1059,"x":1060,"y":1061,"z":1062,"{":1063,"|":1064,"}":1065,"~":1066,"¡":1067,"¢":1068,"£":1069,"¤":1070,"¥":1071,"¦":1072,"§":1073,"¨":1074,"©":1075,"ª":1076,"«":1077,"¬":1078,"®":1079,"°":1080,"±":1081,"²":1082,"³":1083,"´":1084,"µ":1085,"¶":1086,"·":1087,"¹":1088,"º":1089,"»":1090,"¼":1091,"½":1092,"¾":1093,"¿":1094,"×":1095,"ß":1096,"æ":1097,"ð":1098,"÷":1099,"ø":1100,"þ":1101,"đ":1102,"ħ":1103,"ı":1104,"ł":1105,"ŋ":1106,"œ":1107,"ƒ":1108,"ɐ":1109,"ɑ":1110,"ɒ":1111,"ɔ":1112,"ɕ":1113,"ə":1114,
"ɛ":1115,"ɡ":1116,"ɣ":1117,"ɨ":1118,"ɪ":1119,"ɫ":1120,"ɬ":1121,"ɯ":1122,"ɲ":1123,"ɴ":1124,"ɹ":1125,"ɾ":1126,"ʀ":1127,"ʁ":1128,"ʂ":1129,"ʃ":1130,"ʉ":1131,"ʊ":1132,"ʋ":1133,"ʌ":1134,"ʎ":1135,"ʐ":1136,"ʑ":1137,"ʒ":1138,"ʔ":1139,"ʰ":1140,"ʲ":1141,"ʳ":1142,"ʷ":1143,"ʸ":1144,"ʻ":1145,"ʼ":1146,"ʾ":1147,"ʿ":1148,"ˈ":1149,"ː":1150,"ˡ":1151,"ˢ":1152,"ˣ":1153,"ˤ":1154,"α":1155,"β":1156,"γ":1157,"δ":1158,"ε":1159,"ζ":1160,"η":1161,"θ":1162,"ι":1163,"κ":1164,"λ":1165,"μ":1166,"ν":1167,"ξ":1168,"ο":1169,"π":1170,"ρ":1171,"ς":1172,"σ":1173,"τ":1174,"υ":1175,"φ":1176,"χ":1177,"ψ":1178,"ω":1179,"а":1180,"б":1181,"в":1182,"г":1183,"д":1184,"е":1185,"ж":1186,"з":1187,"и":1188,"к":1189,"л":1190,"м":1191,"н":1192,"о":1193,"п":1194,"р":1195,"с":1196,"т":1197,"у":1198,"ф":1199,"х":1200,"ц":1201,"ч":1202,"ш":1203,"щ":1204,"ъ":1205,"ы":1206,"ь":1207,"э":1208,"ю":1209,"я":1210,"ђ":1211,"є":1212,"і":1213,"ј":1214,"љ":1215,"њ":1216,"ћ":1217,"ӏ":1218,"ա":1219,"բ":1220,"գ":1221,"դ":1222,"ե":1223,"թ":1224,"ի":1225,"լ":1226,"կ":1227,"հ":1228,"մ":1229,"յ":1230,"ն":1231,"ո":1232,"պ":1233,"ս":1234,"վ":1235,"տ":1236,"ր":1237,"ւ":1238,"ք":1239,"־":1240,"א":1241,"ב":1242,"ג":1243,"ד":1244,"ה":1245,"ו":1246,"ז":1247,"ח":1248,"ט":1249,"י":1250,"ך":1251,"כ":1252,"ל":1253,"ם":1254,"מ":1255,"ן":1256,"נ":1257,"ס":1258,"ע":1259,"ף":1260,"פ":1261,"ץ":1262,"צ":1263,"ק":1264,"ר":1265,"ש":1266,"ת":1267,"،":1268,"ء":1269,"ا":1270,"ب":1271,"ة":1272,"ت":1273,"ث":1274,"ج":1275,"ح":1276,"خ":1277,"د":1278,"ذ":1279,"ر":1280,"ز":1281,"س":1282,"ش":1283,"ص":1284,"ض":1285,"ط":1286,"ظ":1287,"ع":1288,"غ":1289,"ـ":1290,"ف":1291,"ق":1292,"ك":1293,"ل":1294,"م":1295,"ن":1296,"ه":1297,"و":1298,"ى":1299,"ي":1300,"ٹ":1301,"پ":1302,"چ":1303,"ک":1304,"گ":1305,"ں":1306,"ھ":1307,"ہ":1308,"ی":1309,"ے":1310,"अ":1311,"आ":1312,"उ":1313,"ए":1314,"क":1315,"ख":1316,"ग":1317,"च":1318,"ज":1319,"ट":1320,"ड":1321,"ण":1322,"त":1323,"थ":1324,"द":1325,"ध":1326,"न":1327,"प":1328,"ब":1329,"भ":1330,"म":1331,"य":1332,"र":1333,"ल":1334,"व":1335,"श":1336,"ष":1337,"स":1338,"ह":1339,"ा":1340,"ि":1341,"ी":1342,"ो":1343,"।":1344,"॥":1345,"ং":1346,"অ":1347,"আ":1348,"ই":1349,"উ":1350,"এ":1351,"ও":1352,"ক":1353,"খ":1354,"গ":1355,"চ":1356,"ছ":1357,"জ":1358,"ট":1359,"ড":1360,"ণ":1361,"ত":1362,"থ":1363,"দ":1364,"ধ":1365,"ন":1366,"প":1367,"ব":1368,"ভ":1369,"ম":1370,"য":1371,"র":1372,"ল":1373,"শ":1374,"ষ":1375,"স":1376,"হ":1377,"া":1378,"ি":1379,"ী":1380,"ে":1381,"க":1382,"ச":1383,"ட":1384,"த":1385,"ந":1386,"ன":1387,"ப":1388,"ம":1389,"ய":1390,"ர":1391,"ல":1392,"ள":1393,"வ":1394,"ா":1395,"ி":1396,"ு":1397,"ே":1398,"ை":1399,"ನ":1400,"ರ":1401,"ಾ":1402,"ක":1403,"ය":1404,"ර":1405,"ල":1406,"ව":1407,"ා":1408,"ก":1409,"ง":1410,"ต":1411,"ท":1412,"น":1413,"พ":1414,"ม":1415,"ย":1416,"ร":1417,"ล":1418,"ว":1419,"ส":1420,"อ":1421,"า":1422,"เ":1423,"་":1424,"།":1425,"ག":1426,"ང":1427,"ད":1428,"ན":1429,"པ":1430,"བ":1431,"མ":1432,"འ":1433,"ར":1434,"ལ":1435,"ས":1436,"မ":1437,"ა":1438,"ბ":1439,"გ":1440,"დ":1441,"ე":1442,"ვ":1443,"თ":1444,"ი":1445,"კ":1446,"ლ":1447,"მ":1448,"ნ":1449,"ო":1450,"რ":1451,"ს":1452,"ტ":1453,"უ":1454,"ᄀ":1455,"ᄂ":1456,"ᄃ":1457,"ᄅ":1458,"ᄆ":1459,"ᄇ":1460,"ᄉ":1461,"ᄊ":1462,"ᄋ":1463,"ᄌ":1464,"ᄎ":1465,"ᄏ":1466,"ᄐ":1467,"ᄑ":1468,"ᄒ":1469,"ᅡ":1470,"ᅢ":1471,"ᅥ":1472,"ᅦ":1473,"ᅧ":1474,"ᅩ":1475,"ᅪ":1476,"ᅭ":1477,"ᅮ":1478,"ᅯ":1479,"ᅲ":1480,"ᅳ":1481,"ᅴ":1482,"ᅵ":1483,"ᆨ":1484,"ᆫ":1485,"ᆯ":1486,"ᆷ":1487,"ᆸ":1488,"ᆼ":1489,"ᴬ":1490,"ᴮ":1491,"ᴰ":1492,"ᴵ":1493,"ᴺ":1494,"ᵀ":1495,"ᵃ":1496,"ᵇ":1497,"ᵈ":1498,"ᵉ":1499,"ᵍ":1500,"ᵏ":1501,"ᵐ":1502,"ᵒ":1503,"ᵖ":1504,"ᵗ":1505,"ᵘ":1506,"ᵢ":1507,"ᵣ":1508,"ᵤ":1509,
"ᵥ":1510,"ᶜ":1511,"ᶠ":1512,"‐":1513,"‑":1514,"‒":1515,"–":1516,"—":1517,"―":1518,"‖":1519,"‘":1520,"’":1521,"‚":1522,"“":1523,"”":1524,"„":1525,"†":1526,"‡":1527,"•":1528,"…":1529,"‰":1530,"′":1531,"″":1532,"›":1533,"‿":1534,"⁄":1535,"⁰":1536,"ⁱ":1537,"⁴":1538,"⁵":1539,"⁶":1540,"⁷":1541,"⁸":1542,"⁹":1543,"⁺":1544,"⁻":1545,"ⁿ":1546,"₀":1547,"₁":1548,"₂":1549,"₃":1550,"₄":1551,"₅":1552,"₆":1553,"₇":1554,"₈":1555,"₉":1556,"₊":1557,"₍":1558,"₎":1559,"ₐ":1560,"ₑ":1561,"ₒ":1562,"ₓ":1563,"ₕ":1564,"ₖ":1565,"ₗ":1566,"ₘ":1567,"ₙ":1568,"ₚ":1569,"ₛ":1570,"ₜ":1571,"₤":1572,"₩":1573,"€":1574,"₱":1575,"₹":1576,"ℓ":1577,"№":1578,"ℝ":1579,"™":1580,"⅓":1581,"⅔":1582,"←":1583,"↑":1584,"→":1585,"↓":1586,"↔":1587,"↦":1588,"⇄":1589,"⇌":1590,"⇒":1591,"∂":1592,"∅":1593,"∆":1594,"∇":1595,"∈":1596,"−":1597,"∗":1598,"∘":1599,"√":1600,"∞":1601,"∧":1602,"∨":1603,"∩":1604,"∪":1605,"≈":1606,"≡":1607,"≤":1608,"≥":1609,"⊂":1610,"⊆":1611,"⊕":1612,"⊗":1613,"⋅":1614,"─":1615,"│":1616,"■":1617,"▪":1618,"●":1619,"★":1620,"☆":1621,"☉":1622,"♠":1623,"♣":1624,"♥":1625,"♦":1626,"♭":1627,"♯":1628,"⟨":1629,"⟩":1630,"ⱼ":1631,"⺩":1632,"⺼":1633,"⽥":1634,"、":1635,"。":1636,"〈":1637,"〉":1638,"《":1639,"》":1640,"「":1641,"」":1642,"『":1643,"』":1644,"〜":1645,"あ":1646,"い":1647,"う":1648,"え":1649,"お":1650,"か":1651,"き":1652,"く":1653,"け":1654,"こ":1655,"さ":1656,"し":1657,"す":1658,"せ":1659,"そ":1660,"た":1661,"ち":1662,"っ":1663,"つ":1664,"て":1665,"と":1666,"な":1667,"に":1668,"ぬ":1669,"ね":1670,"の":1671,"は":1672,"ひ":1673,"ふ":1674,"へ":1675,"ほ":1676,"ま":1677,"み":1678,"む":1679,"め":1680,"も":1681,"や":1682,"ゆ":1683,"よ":1684,"ら":1685,"り":1686,"る":1687,"れ":1688,"ろ":1689,"を":1690,"ん":1691,"ァ":1692,"ア":1693,"ィ":1694,"イ":1695,"ウ":1696,"ェ":1697,"エ":1698,"オ":1699,"カ":1700,"キ":1701,"ク":1702,"ケ":1703,"コ":1704,"サ":1705,"シ":1706,"ス":1707,"セ":1708,"タ":1709,"チ":1710,"ッ":1711,"ツ":1712,"テ":1713,"ト":1714,"ナ":1715,"ニ":1716,"ノ":1717,"ハ":1718,"ヒ":1719,"フ":1720,"ヘ":1721,"ホ":1722,"マ":1723,"ミ":1724,"ム":1725,"メ":1726,"モ":1727,"ャ":1728,"ュ":1729,"ョ":1730,"ラ":1731,"リ":1732,"ル":1733,"レ":1734,"ロ":1735,"ワ":1736,"ン":1737,"・":1738,"ー":1739,"一":1740,"三":1741,"上":1742,"下":1743,"不":1744,"世":1745,"中":1746,"主":1747,"久":1748,"之":1749,"也":1750,"事":1751,"二":1752,"五":1753,"井":1754,"京":1755,"人":1756,"亻":1757,"仁":1758,"介":1759,"代":1760,"仮":1761,"伊":1762,"会":1763,"佐":1764,"侍":1765,"保":1766,"信":1767,"健":1768,"元":1769,"光":1770,"八":1771,"公":1772,"内":1773,"出":1774,"分":1775,"前":1776,"劉":1777,"力":1778,"加":1779,"勝":1780,"北":1781,"区":1782,"十":1783,"千":1784,"南":1785,"博":1786,"原":1787,"口":1788,"古":1789,"史":1790,"司":1791,"合":1792,"吉":1793,"同":1794,"名":1795,"和":1796,"囗":1797,"四":1798,"国":1799,"國":1800,"土":1801,"地":1802,"坂":1803,"城":1804,"堂":1805,"場":1806,"士":1807,"夏":1808,"外":1809,"大":1810,"天":1811,"太":1812,"夫":1813,"奈":1814,"女":1815,"子":1816,"学":1817,"宀":1818,"宇":1819,"安":1820,"宗":1821,"定":1822,"宣":1823,"宮":1824,"家":1825,"宿":1826,"寺":1827,"將":1828,"小":1829,"尚":1830,"山":1831,"岡":1832,"島":1833,"崎":1834,"川":1835,"州":1836,"巿":1837,"帝":1838,"平":1839,"年":1840,"幸":1841,"广":1842,"弘":1843,"張":1844,"彳":1845,"後":1846,"御":1847,"德":1848,"心":1849,"忄":1850,"志":1851,"忠":1852,"愛":1853,"成":1854,"我":1855,"戦":1856,"戸":1857,"手":1858,"扌":1859,"政":1860,"文":1861,"新":1862,"方":1863,"日":1864,"明":1865,"星":1866,"春":1867,"昭":1868,"智":1869,"曲":1870,"書":1871,"月":1872,"有":1873,"朝":1874,"木":1875,"本":1876,"李":1877,"村":1878,"東":1879,"松":1880,"林":1881,"森":1882,"楊":1883,"樹":1884,"橋":1885,"歌":1886,"止":1887,"正":1888,"武":1889,"比":1890,"氏":1891,"民":1892,"水":1893,"氵":1894,"氷":1895,"永":1896,"江":1897,"沢":1898,"河":1899,"治":1900,"法":1901,"海":1902,"清":1903,"漢":1904,
"瀬":1905,"火":1906,"版":1907,"犬":1908,"王":1909,"生":1910,"田":1911,"男":1912,"疒":1913,"発":1914,"白":1915,"的":1916,"皇":1917,"目":1918,"相":1919,"省":1920,"真":1921,"石":1922,"示":1923,"社":1924,"神":1925,"福":1926,"禾":1927,"秀":1928,"秋":1929,"空":1930,"立":1931,"章":1932,"竹":1933,"糹":1934,"美":1935,"義":1936,"耳":1937,"良":1938,"艹":1939,"花":1940,"英":1941,"華":1942,"葉":1943,"藤":1944,"行":1945,"街":1946,"西":1947,"見":1948,"訁":1949,"語":1950,"谷":1951,"貝":1952,"貴":1953,"車":1954,"軍":1955,"辶":1956,"道":1957,"郎":1958,"郡":1959,"部":1960,"都":1961,"里":1962,"野":1963,"金":1964,"鈴":1965,"镇":1966,"長":1967,"門":1968,"間":1969,"阝":1970,"阿":1971,"陳":1972,"陽":1973,"雄":1974,"青":1975,"面":1976,"風":1977,"食":1978,"香":1979,"馬":1980,"高":1981,"龍":1982,"龸":1983,"fi":1984,"fl":1985,"!":1986,"(":1987,")":1988,",":1989,"-":1990,".":1991,"/":1992,":":1993,"?":1994,"~":1995,"the":1996,"of":1997,"and":1998,"in":1999,"to":2000,"was":2001,"he":2002,"is":2003,"as":2004,"for":2005,"on":2006,"with":2007,"that":2008,"it":2009,"his":2010,"by":2011,"at":2012,"from":2013,"her":2014,"##s":2015,"she":2016,"you":2017,"had":2018,"an":2019,"were":2020,"but":2021,"be":2022,"this":2023,"are":2024,"not":2025,"my":2026,"they":2027,"one":2028,"which":2029,"or":2030,"have":2031,"him":2032,"me":2033,"first":2034,"all":2035,"also":2036,"their":2037,"has":2038,"up":2039,"who":2040,"out":2041,"been":2042,"when":2043,"after":2044,"there":2045,"into":2046,"new":2047,"two":2048,"its":2049,"##a":2050,"time":2051,"would":2052,"no":2053,"what":2054,"about":2055,"said":2056,"we":2057,"over":2058,"then":2059,"other":2060,"so":2061,"more":2062,"##e":2063,"can":2064,"if":2065,"like":2066,"back":2067,"them":2068,"only":2069,"some":2070,"could":2071,"##i":2072,"where":2073,"just":2074,"##ing":2075,"during":2076,"before":2077,"##n":2078,"do":2079,"##o":2080,"made":2081,"school":2082,"through":2083,"than":2084,"now":2085,"years":2086,"most":2087,"world":2088,"may":2089,"between":2090,"down":2091,"well":2092,"three":2093,"##d":2094,"year":2095,"while":2096,"will":2097,"##ed":2098,"##r":2099,"##y":2100,"later":2101,"##t":2102,"city":2103,"under":2104,"around":2105,"did":2106,"such":2107,"being":2108,"used":2109,"state":2110,"people":2111,"part":2112,"know":2113,"against":2114,"your":2115,"many":2116,"second":2117,"university":2118,"both":2119,"national":2120,"##er":2121,"these":2122,"don":2123,"known":2124,"off":2125,"way":2126,"until":2127,"re":2128,"how":2129,"even":2130,"get":2131,"head":2132,"...":2133,"didn":2134,"##ly":2135,"team":2136,"american":2137,"because":2138,"de":2139,"##l":2140,"born":2141,"united":2142,"film":2143,"since":2144,"still":2145,"long":2146,"work":2147,"south":2148,"us":2149,"became":2150,"any":2151,"high":2152,"again":2153,"day":2154,"family":2155,"see":2156,"right":2157,"man":2158,"eyes":2159,"house":2160,"season":2161,"war":2162,"states":2163,"including":2164,"took":2165,"life":2166,"north":2167,"same":2168,"each":2169,"called":2170,"name":2171,"much":2172,"place":2173,"however":2174,"go":2175,"four":2176,"group":2177,"another":2178,"found":2179,"won":2180,"area":2181,"here":2182,"going":2183,"10":2184,"away":2185,"series":2186,"left":2187,"home":2188,"music":2189,"best":2190,"make":2191,"hand":2192,"number":2193,"company":2194,"several":2195,"never":2196,"last":2197,"john":2198,"000":2199,"very":2200,"album":2201,"take":2202,"end":2203,"good":2204,"too":2205,"following":2206,"released":2207,"game":2208,"played":2209,"little":2210,"began":2211,"district":2212,"##m":2213,"old":2214,"want":2215,"those":2216,"side":2217,"held":2218,"own":2219,"early":2220,"county":2221
,"ll":2222,"league":2223,"use":2224,"west":2225,"##u":2226,"face":2227,"think":2228,"##es":2229,"2010":2230,"government":2231,"##h":2232,"march":2233,"came":2234,"small":2235,"general":2236,"town":2237,"june":2238,"##on":2239,"line":2240,"based":2241,"something":2242,"##k":2243,"september":2244,"thought":2245,"looked":2246,"along":2247,"international":2248,"2011":2249,"air":2250,"july":2251,"club":2252,"went":2253,"january":2254,"october":2255,"our":2256,"august":2257,"april":2258,"york":2259,"12":2260,"few":2261,"2012":2262,"2008":2263,"east":2264,"show":2265,"member":2266,"college":2267,"2009":2268,"father":2269,"public":2270,"##us":2271,"come":2272,"men":2273,"five":2274,"set":2275,"station":2276,"church":2277,"##c":2278,"next":2279,"former":2280,"november":2281,"room":2282,"party":2283,"located":2284,"december":2285,"2013":2286,"age":2287,"got":2288,"2007":2289,"##g":2290,"system":2291,"let":2292,"love":2293,"2006":2294,"though":2295,"every":2296,"2014":2297,"look":2298,"song":2299,"water":2300,"century":2301,"without":2302,"body":2303,"black":2304,"night":2305,"within":2306,"great":2307,"women":2308,"single":2309,"ve":2310,"building":2311,"large":2312,"population":2313,"river":2314,"named":2315,"band":2316,"white":2317,"started":2318,"##an":2319,"once":2320,"15":2321,"20":2322,"should":2323,"18":2324,"2015":2325,"service":2326,"top":2327,"built":2328,"british":2329,"open":2330,"death":2331,"king":2332,"moved":2333,"local":2334,"times":2335,"children":2336,"february":2337,"book":2338,"why":2339,"11":2340,"door":2341,"need":2342,"president":2343,"order":2344,"final":2345,"road":2346,"wasn":2347,"although":2348,"due":2349,"major":2350,"died":2351,"village":2352,"third":2353,"knew":2354,"2016":2355,"asked":2356,"turned":2357,"st":2358,"wanted":2359,"say":2360,"##p":2361,"together":2362,"received":2363,"main":2364,"son":2365,"served":2366,"different":2367,"##en":2368,"behind":2369,"himself":2370,"felt":2371,"members":2372,"power":2373,"football":2374,"law":2375,"voice":2376,"play":2377,"##in":2378,"near":2379,"park":2380,"history":2381,"30":2382,"having":2383,"2005":2384,"16":2385,"##man":2386,"saw":2387,"mother":2388,"##al":2389,"army":2390,"point":2391,"front":2392,"help":2393,"english":2394,"street":2395,"art":2396,"late":2397,"hands":2398,"games":2399,"award":2400,"##ia":2401,"young":2402,"14":2403,"put":2404,"published":2405,"country":2406,"division":2407,"across":2408,"told":2409,"13":2410,"often":2411,"ever":2412,"french":2413,"london":2414,"center":2415,"six":2416,"red":2417,"2017":2418,"led":2419,"days":2420,"include":2421,"light":2422,"25":2423,"find":2424,"tell":2425,"among":2426,"species":2427,"really":2428,"according":2429,"central":2430,"half":2431,"2004":2432,"form":2433,"original":2434,"gave":2435,"office":2436,"making":2437,"enough":2438,"lost":2439,"full":2440,"opened":2441,"must":2442,"included":2443,"live":2444,"given":2445,"german":2446,"player":2447,"run":2448,"business":2449,"woman":2450,"community":2451,"cup":2452,"might":2453,"million":2454,"land":2455,"2000":2456,"court":2457,"development":2458,"17":2459,"short":2460,"round":2461,"ii":2462,"km":2463,"seen":2464,"class":2465,"story":2466,"always":2467,"become":2468,"sure":2469,"research":2470,"almost":2471,"director":2472,"council":2473,"la":2474,"##2":2475,"career":2476,"things":2477,"using":2478,"island":2479,"##z":2480,"couldn":2481,"car":2482,"##is":2483,"24":2484,"close":2485,"force":2486,"##1":2487,"better":2488,"free":2489,"support":2490,"control":2491,"field":2492,"students":2493,"2003":2494,"education":2495
,"married":2496,"##b":2497,"nothing":2498,"worked":2499,"others":2500,"record":2501,"big":2502,"inside":2503,"level":2504,"anything":2505,"continued":2506,"give":2507,"james":2508,"##3":2509,"military":2510,"established":2511,"non":2512,"returned":2513,"feel":2514,"does":2515,"title":2516,"written":2517,"thing":2518,"feet":2519,"william":2520,"far":2521,"co":2522,"association":2523,"hard":2524,"already":2525,"2002":2526,"##ra":2527,"championship":2528,"human":2529,"western":2530,"100":2531,"##na":2532,"department":2533,"hall":2534,"role":2535,"various":2536,"production":2537,"21":2538,"19":2539,"heart":2540,"2001":2541,"living":2542,"fire":2543,"version":2544,"##ers":2545,"##f":2546,"television":2547,"royal":2548,"##4":2549,"produced":2550,"working":2551,"act":2552,"case":2553,"society":2554,"region":2555,"present":2556,"radio":2557,"period":2558,"looking":2559,"least":2560,"total":2561,"keep":2562,"england":2563,"wife":2564,"program":2565,"per":2566,"brother":2567,"mind":2568,"special":2569,"22":2570,"##le":2571,"am":2572,"works":2573,"soon":2574,"##6":2575,"political":2576,"george":2577,"services":2578,"taken":2579,"created":2580,"##7":2581,"further":2582,"able":2583,"reached":2584,"david":2585,"union":2586,"joined":2587,"upon":2588,"done":2589,"important":2590,"social":2591,"information":2592,"either":2593,"##ic":2594,"##x":2595,"appeared":2596,"position":2597,"ground":2598,"lead":2599,"rock":2600,"dark":2601,"election":2602,"23":2603,"board":2604,"france":2605,"hair":2606,"course":2607,"arms":2608,"site":2609,"police":2610,"girl":2611,"instead":2612,"real":2613,"sound":2614,"##v":2615,"words":2616,"moment":2617,"##te":2618,"someone":2619,"##8":2620,"summer":2621,"project":2622,"announced":2623,"san":2624,"less":2625,"wrote":2626,"past":2627,"followed":2628,"##5":2629,"blue":2630,"founded":2631,"al":2632,"finally":2633,"india":2634,"taking":2635,"records":2636,"america":2637,"##ne":2638,"1999":2639,"design":2640,"considered":2641,"northern":2642,"god":2643,"stop":2644,"battle":2645,"toward":2646,"european":2647,"outside":2648,"described":2649,"track":2650,"today":2651,"playing":2652,"language":2653,"28":2654,"call":2655,"26":2656,"heard":2657,"professional":2658,"low":2659,"australia":2660,"miles":2661,"california":2662,"win":2663,"yet":2664,"green":2665,"##ie":2666,"trying":2667,"blood":2668,"##ton":2669,"southern":2670,"science":2671,"maybe":2672,"everything":2673,"match":2674,"square":2675,"27":2676,"mouth":2677,"video":2678,"race":2679,"recorded":2680,"leave":2681,"above":2682,"##9":2683,"daughter":2684,"points":2685,"space":2686,"1998":2687,"museum":2688,"change":2689,"middle":2690,"common":2691,"##0":2692,"move":2693,"tv":2694,"post":2695,"##ta":2696,"lake":2697,"seven":2698,"tried":2699,"elected":2700,"closed":2701,"ten":2702,"paul":2703,"minister":2704,"##th":2705,"months":2706,"start":2707,"chief":2708,"return":2709,"canada":2710,"person":2711,"sea":2712,"release":2713,"similar":2714,"modern":2715,"brought":2716,"rest":2717,"hit":2718,"formed":2719,"mr":2720,"##la":2721,"1997":2722,"floor":2723,"event":2724,"doing":2725,"thomas":2726,"1996":2727,"robert":2728,"care":2729,"killed":2730,"training":2731,"star":2732,"week":2733,"needed":2734,"turn":2735,"finished":2736,"railway":2737,"rather":2738,"news":2739,"health":2740,"sent":2741,"example":2742,"ran":2743,"term":2744,"michael":2745,"coming":2746,"currently":2747,"yes":2748,"forces":2749,"despite":2750,"gold":2751,"areas":2752,"50":2753,"stage":2754,"fact":2755,"29":2756,"dead":2757,"says":2758,"popular":2759,"2018":2760,"origin
ally":2761,"germany":2762,"probably":2763,"developed":2764,"result":2765,"pulled":2766,"friend":2767,"stood":2768,"money":2769,"running":2770,"mi":2771,"signed":2772,"word":2773,"songs":2774,"child":2775,"eventually":2776,"met":2777,"tour":2778,"average":2779,"teams":2780,"minutes":2781,"festival":2782,"current":2783,"deep":2784,"kind":2785,"1995":2786,"decided":2787,"usually":2788,"eastern":2789,"seemed":2790,"##ness":2791,"episode":2792,"bed":2793,"added":2794,"table":2795,"indian":2796,"private":2797,"charles":2798,"route":2799,"available":2800,"idea":2801,"throughout":2802,"centre":2803,"addition":2804,"appointed":2805,"style":2806,"1994":2807,"books":2808,"eight":2809,"construction":2810,"press":2811,"mean":2812,"wall":2813,"friends":2814,"remained":2815,"schools":2816,"study":2817,"##ch":2818,"##um":2819,"institute":2820,"oh":2821,"chinese":2822,"sometimes":2823,"events":2824,"possible":2825,"1992":2826,"australian":2827,"type":2828,"brown":2829,"forward":2830,"talk":2831,"process":2832,"food":2833,"debut":2834,"seat":2835,"performance":2836,"committee":2837,"features":2838,"character":2839,"arts":2840,"herself":2841,"else":2842,"lot":2843,"strong":2844,"russian":2845,"range":2846,"hours":2847,"peter":2848,"arm":2849,"##da":2850,"morning":2851,"dr":2852,"sold":2853,"##ry":2854,"quickly":2855,"directed":2856,"1993":2857,"guitar":2858,"china":2859,"##w":2860,"31":2861,"list":2862,"##ma":2863,"performed":2864,"media":2865,"uk":2866,"players":2867,"smile":2868,"##rs":2869,"myself":2870,"40":2871,"placed":2872,"coach":2873,"province":2874,"towards":2875,"wouldn":2876,"leading":2877,"whole":2878,"boy":2879,"official":2880,"designed":2881,"grand":2882,"census":2883,"##el":2884,"europe":2885,"attack":2886,"japanese":2887,"henry":2888,"1991":2889,"##re":2890,"##os":2891,"cross":2892,"getting":2893,"alone":2894,"action":2895,"lower":2896,"network":2897,"wide":2898,"washington":2899,"japan":2900,"1990":2901,"hospital":2902,"believe":2903,"changed":2904,"sister":2905,"##ar":2906,"hold":2907,"gone":2908,"sir":2909,"hadn":2910,"ship":2911,"##ka":2912,"studies":2913,"academy":2914,"shot":2915,"rights":2916,"below":2917,"base":2918,"bad":2919,"involved":2920,"kept":2921,"largest":2922,"##ist":2923,"bank":2924,"future":2925,"especially":2926,"beginning":2927,"mark":2928,"movement":2929,"section":2930,"female":2931,"magazine":2932,"plan":2933,"professor":2934,"lord":2935,"longer":2936,"##ian":2937,"sat":2938,"walked":2939,"hill":2940,"actually":2941,"civil":2942,"energy":2943,"model":2944,"families":2945,"size":2946,"thus":2947,"aircraft":2948,"completed":2949,"includes":2950,"data":2951,"captain":2952,"##or":2953,"fight":2954,"vocals":2955,"featured":2956,"richard":2957,"bridge":2958,"fourth":2959,"1989":2960,"officer":2961,"stone":2962,"hear":2963,"##ism":2964,"means":2965,"medical":2966,"groups":2967,"management":2968,"self":2969,"lips":2970,"competition":2971,"entire":2972,"lived":2973,"technology":2974,"leaving":2975,"federal":2976,"tournament":2977,"bit":2978,"passed":2979,"hot":2980,"independent":2981,"awards":2982,"kingdom":2983,"mary":2984,"spent":2985,"fine":2986,"doesn":2987,"reported":2988,"##ling":2989,"jack":2990,"fall":2991,"raised":2992,"itself":2993,"stay":2994,"true":2995,"studio":2996,"1988":2997,"sports":2998,"replaced":2999,"paris":3000,"systems":3001,"saint":3002,"leader":3003,"theatre":3004,"whose":3005,"market":3006,"capital":3007,"parents":3008,"spanish":3009,"canadian":3010,"earth":3011,"##ity":3012,"cut":3013,"degree":3014,"writing":3015,"bay":3016,"christian":3017,"awarded":
3018,"natural":3019,"higher":3020,"bill":3021,"##as":3022,"coast":3023,"provided":3024,"previous":3025,"senior":3026,"ft":3027,"valley":3028,"organization":3029,"stopped":3030,"onto":3031,"countries":3032,"parts":3033,"conference":3034,"queen":3035,"security":3036,"interest":3037,"saying":3038,"allowed":3039,"master":3040,"earlier":3041,"phone":3042,"matter":3043,"smith":3044,"winning":3045,"try":3046,"happened":3047,"moving":3048,"campaign":3049,"los":3050,"##ley":3051,"breath":3052,"nearly":3053,"mid":3054,"1987":3055,"certain":3056,"girls":3057,"date":3058,"italian":3059,"african":3060,"standing":3061,"fell":3062,"artist":3063,"##ted":3064,"shows":3065,"deal":3066,"mine":3067,"industry":3068,"1986":3069,"##ng":3070,"everyone":3071,"republic":3072,"provide":3073,"collection":3074,"library":3075,"student":3076,"##ville":3077,"primary":3078,"owned":3079,"older":3080,"via":3081,"heavy":3082,"1st":3083,"makes":3084,"##able":3085,"attention":3086,"anyone":3087,"africa":3088,"##ri":3089,"stated":3090,"length":3091,"ended":3092,"fingers":3093,"command":3094,"staff":3095,"skin":3096,"foreign":3097,"opening":3098,"governor":3099,"okay":3100,"medal":3101,"kill":3102,"sun":3103,"cover":3104,"job":3105,"1985":3106,"introduced":3107,"chest":3108,"hell":3109,"feeling":3110,"##ies":3111,"success":3112,"meet":3113,"reason":3114,"standard":3115,"meeting":3116,"novel":3117,"1984":3118,"trade":3119,"source":3120,"buildings":3121,"##land":3122,"rose":3123,"guy":3124,"goal":3125,"##ur":3126,"chapter":3127,"native":3128,"husband":3129,"previously":3130,"unit":3131,"limited":3132,"entered":3133,"weeks":3134,"producer":3135,"operations":3136,"mountain":3137,"takes":3138,"covered":3139,"forced":3140,"related":3141,"roman":3142,"complete":3143,"successful":3144,"key":3145,"texas":3146,"cold":3147,"##ya":3148,"channel":3149,"1980":3150,"traditional":3151,"films":3152,"dance":3153,"clear":3154,"approximately":3155,"500":3156,"nine":3157,"van":3158,"prince":3159,"question":3160,"active":3161,"tracks":3162,"ireland":3163,"regional":3164,"silver":3165,"author":3166,"personal":3167,"sense":3168,"operation":3169,"##ine":3170,"economic":3171,"1983":3172,"holding":3173,"twenty":3174,"isbn":3175,"additional":3176,"speed":3177,"hour":3178,"edition":3179,"regular":3180,"historic":3181,"places":3182,"whom":3183,"shook":3184,"movie":3185,"km²":3186,"secretary":3187,"prior":3188,"report":3189,"chicago":3190,"read":3191,"foundation":3192,"view":3193,"engine":3194,"scored":3195,"1982":3196,"units":3197,"ask":3198,"airport":3199,"property":3200,"ready":3201,"immediately":3202,"lady":3203,"month":3204,"listed":3205,"contract":3206,"##de":3207,"manager":3208,"themselves":3209,"lines":3210,"##ki":3211,"navy":3212,"writer":3213,"meant":3214,"##ts":3215,"runs":3216,"##ro":3217,"practice":3218,"championships":3219,"singer":3220,"glass":3221,"commission":3222,"required":3223,"forest":3224,"starting":3225,"culture":3226,"generally":3227,"giving":3228,"access":3229,"attended":3230,"test":3231,"couple":3232,"stand":3233,"catholic":3234,"martin":3235,"caught":3236,"executive":3237,"##less":3238,"eye":3239,"##ey":3240,"thinking":3241,"chair":3242,"quite":3243,"shoulder":3244,"1979":3245,"hope":3246,"decision":3247,"plays":3248,"defeated":3249,"municipality":3250,"whether":3251,"structure":3252,"offered":3253,"slowly":3254,"pain":3255,"ice":3256,"direction":3257,"##ion":3258,"paper":3259,"mission":3260,"1981":3261,"mostly":3262,"200":3263,"noted":3264,"individual":3265,"managed":3266,"nature":3267,"lives":3268,"plant":3269,"##ha":3270,"helped":
3271,"except":3272,"studied":3273,"computer":3274,"figure":3275,"relationship":3276,"issue":3277,"significant":3278,"loss":3279,"die":3280,"smiled":3281,"gun":3282,"ago":3283,"highest":3284,"1972":3285,"##am":3286,"male":3287,"bring":3288,"goals":3289,"mexico":3290,"problem":3291,"distance":3292,"commercial":3293,"completely":3294,"location":3295,"annual":3296,"famous":3297,"drive":3298,"1976":3299,"neck":3300,"1978":3301,"surface":3302,"caused":3303,"italy":3304,"understand":3305,"greek":3306,"highway":3307,"wrong":3308,"hotel":3309,"comes":3310,"appearance":3311,"joseph":3312,"double":3313,"issues":3314,"musical":3315,"companies":3316,"castle":3317,"income":3318,"review":3319,"assembly":3320,"bass":3321,"initially":3322,"parliament":3323,"artists":3324,"experience":3325,"1974":3326,"particular":3327,"walk":3328,"foot":3329,"engineering":3330,"talking":3331,"window":3332,"dropped":3333,"##ter":3334,"miss":3335,"baby":3336,"boys":3337,"break":3338,"1975":3339,"stars":3340,"edge":3341,"remember":3342,"policy":3343,"carried":3344,"train":3345,"stadium":3346,"bar":3347,"sex":3348,"angeles":3349,"evidence":3350,"##ge":3351,"becoming":3352,"assistant":3353,"soviet":3354,"1977":3355,"upper":3356,"step":3357,"wing":3358,"1970":3359,"youth":3360,"financial":3361,"reach":3362,"##ll":3363,"actor":3364,"numerous":3365,"##se":3366,"##st":3367,"nodded":3368,"arrived":3369,"##ation":3370,"minute":3371,"##nt":3372,"believed":3373,"sorry":3374,"complex":3375,"beautiful":3376,"victory":3377,"associated":3378,"temple":3379,"1968":3380,"1973":3381,"chance":3382,"perhaps":3383,"metal":3384,"##son":3385,"1945":3386,"bishop":3387,"##et":3388,"lee":3389,"launched":3390,"particularly":3391,"tree":3392,"le":3393,"retired":3394,"subject":3395,"prize":3396,"contains":3397,"yeah":3398,"theory":3399,"empire":3400,"##ce":3401,"suddenly":3402,"waiting":3403,"trust":3404,"recording":3405,"##to":3406,"happy":3407,"terms":3408,"camp":3409,"champion":3410,"1971":3411,"religious":3412,"pass":3413,"zealand":3414,"names":3415,"2nd":3416,"port":3417,"ancient":3418,"tom":3419,"corner":3420,"represented":3421,"watch":3422,"legal":3423,"anti":3424,"justice":3425,"cause":3426,"watched":3427,"brothers":3428,"45":3429,"material":3430,"changes":3431,"simply":3432,"response":3433,"louis":3434,"fast":3435,"##ting":3436,"answer":3437,"60":3438,"historical":3439,"1969":3440,"stories":3441,"straight":3442,"create":3443,"feature":3444,"increased":3445,"rate":3446,"administration":3447,"virginia":3448,"el":3449,"activities":3450,"cultural":3451,"overall":3452,"winner":3453,"programs":3454,"basketball":3455,"legs":3456,"guard":3457,"beyond":3458,"cast":3459,"doctor":3460,"mm":3461,"flight":3462,"results":3463,"remains":3464,"cost":3465,"effect":3466,"winter":3467,"##ble":3468,"larger":3469,"islands":3470,"problems":3471,"chairman":3472,"grew":3473,"commander":3474,"isn":3475,"1967":3476,"pay":3477,"failed":3478,"selected":3479,"hurt":3480,"fort":3481,"box":3482,"regiment":3483,"majority":3484,"journal":3485,"35":3486,"edward":3487,"plans":3488,"##ke":3489,"##ni":3490,"shown":3491,"pretty":3492,"irish":3493,"characters":3494,"directly":3495,"scene":3496,"likely":3497,"operated":3498,"allow":3499,"spring":3500,"##j":3501,"junior":3502,"matches":3503,"looks":3504,"mike":3505,"houses":3506,"fellow":3507,"##tion":3508,"beach":3509,"marriage":3510,"##ham":3511,"##ive":3512,"rules":3513,"oil":3514,"65":3515,"florida":3516,"expected":3517,"nearby":3518,"congress":3519,"sam":3520,"peace":3521,"recent":3522,"iii":3523,"wait":3524,"subsequently":3525,"ce
ll":3526,"##do":3527,"variety":3528,"serving":3529,"agreed":3530,"please":3531,"poor":3532,"joe":3533,"pacific":3534,"attempt":3535,"wood":3536,"democratic":3537,"piece":3538,"prime":3539,"##ca":3540,"rural":3541,"mile":3542,"touch":3543,"appears":3544,"township":3545,"1964":3546,"1966":3547,"soldiers":3548,"##men":3549,"##ized":3550,"1965":3551,"pennsylvania":3552,"closer":3553,"fighting":3554,"claimed":3555,"score":3556,"jones":3557,"physical":3558,"editor":3559,"##ous":3560,"filled":3561,"genus":3562,"specific":3563,"sitting":3564,"super":3565,"mom":3566,"##va":3567,"therefore":3568,"supported":3569,"status":3570,"fear":3571,"cases":3572,"store":3573,"meaning":3574,"wales":3575,"minor":3576,"spain":3577,"tower":3578,"focus":3579,"vice":3580,"frank":3581,"follow":3582,"parish":3583,"separate":3584,"golden":3585,"horse":3586,"fifth":3587,"remaining":3588,"branch":3589,"32":3590,"presented":3591,"stared":3592,"##id":3593,"uses":3594,"secret":3595,"forms":3596,"##co":3597,"baseball":3598,"exactly":3599,"##ck":3600,"choice":3601,"note":3602,"discovered":3603,"travel":3604,"composed":3605,"truth":3606,"russia":3607,"ball":3608,"color":3609,"kiss":3610,"dad":3611,"wind":3612,"continue":3613,"ring":3614,"referred":3615,"numbers":3616,"digital":3617,"greater":3618,"##ns":3619,"metres":3620,"slightly":3621,"direct":3622,"increase":3623,"1960":3624,"responsible":3625,"crew":3626,"rule":3627,"trees":3628,"troops":3629,"##no":3630,"broke":3631,"goes":3632,"individuals":3633,"hundred":3634,"weight":3635,"creek":3636,"sleep":3637,"memory":3638,"defense":3639,"provides":3640,"ordered":3641,"code":3642,"value":3643,"jewish":3644,"windows":3645,"1944":3646,"safe":3647,"judge":3648,"whatever":3649,"corps":3650,"realized":3651,"growing":3652,"pre":3653,"##ga":3654,"cities":3655,"alexander":3656,"gaze":3657,"lies":3658,"spread":3659,"scott":3660,"letter":3661,"showed":3662,"situation":3663,"mayor":3664,"transport":3665,"watching":3666,"workers":3667,"extended":3668,"##li":3669,"expression":3670,"normal":3671,"##ment":3672,"chart":3673,"multiple":3674,"border":3675,"##ba":3676,"host":3677,"##ner":3678,"daily":3679,"mrs":3680,"walls":3681,"piano":3682,"##ko":3683,"heat":3684,"cannot":3685,"##ate":3686,"earned":3687,"products":3688,"drama":3689,"era":3690,"authority":3691,"seasons":3692,"join":3693,"grade":3694,"##io":3695,"sign":3696,"difficult":3697,"machine":3698,"1963":3699,"territory":3700,"mainly":3701,"##wood":3702,"stations":3703,"squadron":3704,"1962":3705,"stepped":3706,"iron":3707,"19th":3708,"##led":3709,"serve":3710,"appear":3711,"sky":3712,"speak":3713,"broken":3714,"charge":3715,"knowledge":3716,"kilometres":3717,"removed":3718,"ships":3719,"article":3720,"campus":3721,"simple":3722,"##ty":3723,"pushed":3724,"britain":3725,"##ve":3726,"leaves":3727,"recently":3728,"cd":3729,"soft":3730,"boston":3731,"latter":3732,"easy":3733,"acquired":3734,"poland":3735,"##sa":3736,"quality":3737,"officers":3738,"presence":3739,"planned":3740,"nations":3741,"mass":3742,"broadcast":3743,"jean":3744,"share":3745,"image":3746,"influence":3747,"wild":3748,"offer":3749,"emperor":3750,"electric":3751,"reading":3752,"headed":3753,"ability":3754,"promoted":3755,"yellow":3756,"ministry":3757,"1942":3758,"throat":3759,"smaller":3760,"politician":3761,"##by":3762,"latin":3763,"spoke":3764,"cars":3765,"williams":3766,"males":3767,"lack":3768,"pop":3769,"80":3770,"##ier":3771,"acting":3772,"seeing":3773,"consists":3774,"##ti":3775,"estate":3776,"1961":3777,"pressure":3778,"johnson":3779,"newspaper":3780,"jr":3781,"chris":378
2,"olympics":3783,"online":3784,"conditions":3785,"beat":3786,"elements":3787,"walking":3788,"vote":3789,"##field":3790,"needs":3791,"carolina":3792,"text":3793,"featuring":3794,"global":3795,"block":3796,"shirt":3797,"levels":3798,"francisco":3799,"purpose":3800,"females":3801,"et":3802,"dutch":3803,"duke":3804,"ahead":3805,"gas":3806,"twice":3807,"safety":3808,"serious":3809,"turning":3810,"highly":3811,"lieutenant":3812,"firm":3813,"maria":3814,"amount":3815,"mixed":3816,"daniel":3817,"proposed":3818,"perfect":3819,"agreement":3820,"affairs":3821,"3rd":3822,"seconds":3823,"contemporary":3824,"paid":3825,"1943":3826,"prison":3827,"save":3828,"kitchen":3829,"label":3830,"administrative":3831,"intended":3832,"constructed":3833,"academic":3834,"nice":3835,"teacher":3836,"races":3837,"1956":3838,"formerly":3839,"corporation":3840,"ben":3841,"nation":3842,"issued":3843,"shut":3844,"1958":3845,"drums":3846,"housing":3847,"victoria":3848,"seems":3849,"opera":3850,"1959":3851,"graduated":3852,"function":3853,"von":3854,"mentioned":3855,"picked":3856,"build":3857,"recognized":3858,"shortly":3859,"protection":3860,"picture":3861,"notable":3862,"exchange":3863,"elections":3864,"1980s":3865,"loved":3866,"percent":3867,"racing":3868,"fish":3869,"elizabeth":3870,"garden":3871,"volume":3872,"hockey":3873,"1941":3874,"beside":3875,"settled":3876,"##ford":3877,"1940":3878,"competed":3879,"replied":3880,"drew":3881,"1948":3882,"actress":3883,"marine":3884,"scotland":3885,"steel":3886,"glanced":3887,"farm":3888,"steve":3889,"1957":3890,"risk":3891,"tonight":3892,"positive":3893,"magic":3894,"singles":3895,"effects":3896,"gray":3897,"screen":3898,"dog":3899,"##ja":3900,"residents":3901,"bus":3902,"sides":3903,"none":3904,"secondary":3905,"literature":3906,"polish":3907,"destroyed":3908,"flying":3909,"founder":3910,"households":3911,"1939":3912,"lay":3913,"reserve":3914,"usa":3915,"gallery":3916,"##ler":3917,"1946":3918,"industrial":3919,"younger":3920,"approach":3921,"appearances":3922,"urban":3923,"ones":3924,"1950":3925,"finish":3926,"avenue":3927,"powerful":3928,"fully":3929,"growth":3930,"page":3931,"honor":3932,"jersey":3933,"projects":3934,"advanced":3935,"revealed":3936,"basic":3937,"90":3938,"infantry":3939,"pair":3940,"equipment":3941,"visit":3942,"33":3943,"evening":3944,"search":3945,"grant":3946,"effort":3947,"solo":3948,"treatment":3949,"buried":3950,"republican":3951,"primarily":3952,"bottom":3953,"owner":3954,"1970s":3955,"israel":3956,"gives":3957,"jim":3958,"dream":3959,"bob":3960,"remain":3961,"spot":3962,"70":3963,"notes":3964,"produce":3965,"champions":3966,"contact":3967,"ed":3968,"soul":3969,"accepted":3970,"ways":3971,"del":3972,"##ally":3973,"losing":3974,"split":3975,"price":3976,"capacity":3977,"basis":3978,"trial":3979,"questions":3980,"##ina":3981,"1955":3982,"20th":3983,"guess":3984,"officially":3985,"memorial":3986,"naval":3987,"initial":3988,"##ization":3989,"whispered":3990,"median":3991,"engineer":3992,"##ful":3993,"sydney":3994,"##go":3995,"columbia":3996,"strength":3997,"300":3998,"1952":3999,"tears":4000,"senate":4001,"00":4002,"card":4003,"asian":4004,"agent":4005,"1947":4006,"software":4007,"44":4008,"draw":4009,"warm":4010,"supposed":4011,"com":4012,"pro":4013,"##il":4014,"transferred":4015,"leaned":4016,"##at":4017,"candidate":4018,"escape":4019,"mountains":4020,"asia":4021,"potential":4022,"activity":4023,"entertainment":4024,"seem":4025,"traffic":4026,"jackson":4027,"murder":4028,"36":4029,"slow":4030,"product":4031,"orchestra":4032,"haven":4033,"agency":4034,"bbc":4
035,"taught":4036,"website":4037,"comedy":4038,"unable":4039,"storm":4040,"planning":4041,"albums":4042,"rugby":4043,"environment":4044,"scientific":4045,"grabbed":4046,"protect":4047,"##hi":4048,"boat":4049,"typically":4050,"1954":4051,"1953":4052,"damage":4053,"principal":4054,"divided":4055,"dedicated":4056,"mount":4057,"ohio":4058,"##berg":4059,"pick":4060,"fought":4061,"driver":4062,"##der":4063,"empty":4064,"shoulders":4065,"sort":4066,"thank":4067,"berlin":4068,"prominent":4069,"account":4070,"freedom":4071,"necessary":4072,"efforts":4073,"alex":4074,"headquarters":4075,"follows":4076,"alongside":4077,"des":4078,"simon":4079,"andrew":4080,"suggested":4081,"operating":4082,"learning":4083,"steps":4084,"1949":4085,"sweet":4086,"technical":4087,"begin":4088,"easily":4089,"34":4090,"teeth":4091,"speaking":4092,"settlement":4093,"scale":4094,"##sh":4095,"renamed":4096,"ray":4097,"max":4098,"enemy":4099,"semi":4100,"joint":4101,"compared":4102,"##rd":4103,"scottish":4104,"leadership":4105,"analysis":4106,"offers":4107,"georgia":4108,"pieces":4109,"captured":4110,"animal":4111,"deputy":4112,"guest":4113,"organized":4114,"##lin":4115,"tony":4116,"combined":4117,"method":4118,"challenge":4119,"1960s":4120,"huge":4121,"wants":4122,"battalion":4123,"sons":4124,"rise":4125,"crime":4126,"types":4127,"facilities":4128,"telling":4129,"path":4130,"1951":4131,"platform":4132,"sit":4133,"1990s":4134,"##lo":4135,"tells":4136,"assigned":4137,"rich":4138,"pull":4139,"##ot":4140,"commonly":4141,"alive":4142,"##za":4143,"letters":4144,"concept":4145,"conducted":4146,"wearing":4147,"happen":4148,"bought":4149,"becomes":4150,"holy":4151,"gets":4152,"ocean":4153,"defeat":4154,"languages":4155,"purchased":4156,"coffee":4157,"occurred":4158,"titled":4159,"##q":4160,"declared":4161,"applied":4162,"sciences":4163,"concert":4164,"sounds":4165,"jazz":4166,"brain":4167,"##me":4168,"painting":4169,"fleet":4170,"tax":4171,"nick":4172,"##ius":4173,"michigan":4174,"count":4175,"animals":4176,"leaders":4177,"episodes":4178,"##line":4179,"content":4180,"##den":4181,"birth":4182,"##it":4183,"clubs":4184,"64":4185,"palace":4186,"critical":4187,"refused":4188,"fair":4189,"leg":4190,"laughed":4191,"returning":4192,"surrounding":4193,"participated":4194,"formation":4195,"lifted":4196,"pointed":4197,"connected":4198,"rome":4199,"medicine":4200,"laid":4201,"taylor":4202,"santa":4203,"powers":4204,"adam":4205,"tall":4206,"shared":4207,"focused":4208,"knowing":4209,"yards":4210,"entrance":4211,"falls":4212,"##wa":4213,"calling":4214,"##ad":4215,"sources":4216,"chosen":4217,"beneath":4218,"resources":4219,"yard":4220,"##ite":4221,"nominated":4222,"silence":4223,"zone":4224,"defined":4225,"##que":4226,"gained":4227,"thirty":4228,"38":4229,"bodies":4230,"moon":4231,"##ard":4232,"adopted":4233,"christmas":4234,"widely":4235,"register":4236,"apart":4237,"iran":4238,"premier":4239,"serves":4240,"du":4241,"unknown":4242,"parties":4243,"##les":4244,"generation":4245,"##ff":4246,"continues":4247,"quick":4248,"fields":4249,"brigade":4250,"quiet":4251,"teaching":4252,"clothes":4253,"impact":4254,"weapons":4255,"partner":4256,"flat":4257,"theater":4258,"supreme":4259,"1938":4260,"37":4261,"relations":4262,"##tor":4263,"plants":4264,"suffered":4265,"1936":4266,"wilson":4267,"kids":4268,"begins":4269,"##age":4270,"1918":4271,"seats":4272,"armed":4273,"internet":4274,"models":4275,"worth":4276,"laws":4277,"400":4278,"communities":4279,"classes":4280,"background":4281,"knows":4282,"thanks":4283,"quarter":4284,"reaching":4285,"humans":4286,"carry"
:4287,"killing":4288,"format":4289,"kong":4290,"hong":4291,"setting":4292,"75":4293,"architecture":4294,"disease":4295,"railroad":4296,"inc":4297,"possibly":4298,"wish":4299,"arthur":4300,"thoughts":4301,"harry":4302,"doors":4303,"density":4304,"##di":4305,"crowd":4306,"illinois":4307,"stomach":4308,"tone":4309,"unique":4310,"reports":4311,"anyway":4312,"##ir":4313,"liberal":4314,"der":4315,"vehicle":4316,"thick":4317,"dry":4318,"drug":4319,"faced":4320,"largely":4321,"facility":4322,"theme":4323,"holds":4324,"creation":4325,"strange":4326,"colonel":4327,"##mi":4328,"revolution":4329,"bell":4330,"politics":4331,"turns":4332,"silent":4333,"rail":4334,"relief":4335,"independence":4336,"combat":4337,"shape":4338,"write":4339,"determined":4340,"sales":4341,"learned":4342,"4th":4343,"finger":4344,"oxford":4345,"providing":4346,"1937":4347,"heritage":4348,"fiction":4349,"situated":4350,"designated":4351,"allowing":4352,"distribution":4353,"hosted":4354,"##est":4355,"sight":4356,"interview":4357,"estimated":4358,"reduced":4359,"##ria":4360,"toronto":4361,"footballer":4362,"keeping":4363,"guys":4364,"damn":4365,"claim":4366,"motion":4367,"sport":4368,"sixth":4369,"stayed":4370,"##ze":4371,"en":4372,"rear":4373,"receive":4374,"handed":4375,"twelve":4376,"dress":4377,"audience":4378,"granted":4379,"brazil":4380,"##well":4381,"spirit":4382,"##ated":4383,"noticed":4384,"etc":4385,"olympic":4386,"representative":4387,"eric":4388,"tight":4389,"trouble":4390,"reviews":4391,"drink":4392,"vampire":4393,"missing":4394,"roles":4395,"ranked":4396,"newly":4397,"household":4398,"finals":4399,"wave":4400,"critics":4401,"##ee":4402,"phase":4403,"massachusetts":4404,"pilot":4405,"unlike":4406,"philadelphia":4407,"bright":4408,"guns":4409,"crown":4410,"organizations":4411,"roof":4412,"42":4413,"respectively":4414,"clearly":4415,"tongue":4416,"marked":4417,"circle":4418,"fox":4419,"korea":4420,"bronze":4421,"brian":4422,"expanded":4423,"sexual":4424,"supply":4425,"yourself":4426,"inspired":4427,"labour":4428,"fc":4429,"##ah":4430,"reference":4431,"vision":4432,"draft":4433,"connection":4434,"brand":4435,"reasons":4436,"1935":4437,"classic":4438,"driving":4439,"trip":4440,"jesus":4441,"cells":4442,"entry":4443,"1920":4444,"neither":4445,"trail":4446,"claims":4447,"atlantic":4448,"orders":4449,"labor":4450,"nose":4451,"afraid":4452,"identified":4453,"intelligence":4454,"calls":4455,"cancer":4456,"attacked":4457,"passing":4458,"stephen":4459,"positions":4460,"imperial":4461,"grey":4462,"jason":4463,"39":4464,"sunday":4465,"48":4466,"swedish":4467,"avoid":4468,"extra":4469,"uncle":4470,"message":4471,"covers":4472,"allows":4473,"surprise":4474,"materials":4475,"fame":4476,"hunter":4477,"##ji":4478,"1930":4479,"citizens":4480,"figures":4481,"davis":4482,"environmental":4483,"confirmed":4484,"shit":4485,"titles":4486,"di":4487,"performing":4488,"difference":4489,"acts":4490,"attacks":4491,"##ov":4492,"existing":4493,"votes":4494,"opportunity":4495,"nor":4496,"shop":4497,"entirely":4498,"trains":4499,"opposite":4500,"pakistan":4501,"##pa":4502,"develop":4503,"resulted":4504,"representatives":4505,"actions":4506,"reality":4507,"pressed":4508,"##ish":4509,"barely":4510,"wine":4511,"conversation":4512,"faculty":4513,"northwest":4514,"ends":4515,"documentary":4516,"nuclear":4517,"stock":4518,"grace":4519,"sets":4520,"eat":4521,"alternative":4522,"##ps":4523,"bag":4524,"resulting":4525,"creating":4526,"surprised":4527,"cemetery":4528,"1919":4529,"drop":4530,"finding":4531,"sarah":4532,"cricket":4533,"streets":4534,"tradition":45
35,"ride":4536,"1933":4537,"exhibition":4538,"target":4539,"ear":4540,"explained":4541,"rain":4542,"composer":4543,"injury":4544,"apartment":4545,"municipal":4546,"educational":4547,"occupied":4548,"netherlands":4549,"clean":4550,"billion":4551,"constitution":4552,"learn":4553,"1914":4554,"maximum":4555,"classical":4556,"francis":4557,"lose":4558,"opposition":4559,"jose":4560,"ontario":4561,"bear":4562,"core":4563,"hills":4564,"rolled":4565,"ending":4566,"drawn":4567,"permanent":4568,"fun":4569,"##tes":4570,"##lla":4571,"lewis":4572,"sites":4573,"chamber":4574,"ryan":4575,"##way":4576,"scoring":4577,"height":4578,"1934":4579,"##house":4580,"lyrics":4581,"staring":4582,"55":4583,"officials":4584,"1917":4585,"snow":4586,"oldest":4587,"##tic":4588,"orange":4589,"##ger":4590,"qualified":4591,"interior":4592,"apparently":4593,"succeeded":4594,"thousand":4595,"dinner":4596,"lights":4597,"existence":4598,"fans":4599,"heavily":4600,"41":4601,"greatest":4602,"conservative":4603,"send":4604,"bowl":4605,"plus":4606,"enter":4607,"catch":4608,"##un":4609,"economy":4610,"duty":4611,"1929":4612,"speech":4613,"authorities":4614,"princess":4615,"performances":4616,"versions":4617,"shall":4618,"graduate":4619,"pictures":4620,"effective":4621,"remembered":4622,"poetry":4623,"desk":4624,"crossed":4625,"starring":4626,"starts":4627,"passenger":4628,"sharp":4629,"##ant":4630,"acres":4631,"ass":4632,"weather":4633,"falling":4634,"rank":4635,"fund":4636,"supporting":4637,"check":4638,"adult":4639,"publishing":4640,"heads":4641,"cm":4642,"southeast":4643,"lane":4644,"##burg":4645,"application":4646,"bc":4647,"##ura":4648,"les":4649,"condition":4650,"transfer":4651,"prevent":4652,"display":4653,"ex":4654,"regions":4655,"earl":4656,"federation":4657,"cool":4658,"relatively":4659,"answered":4660,"besides":4661,"1928":4662,"obtained":4663,"portion":4664,"##town":4665,"mix":4666,"##ding":4667,"reaction":4668,"liked":4669,"dean":4670,"express":4671,"peak":4672,"1932":4673,"##tte":4674,"counter":4675,"religion":4676,"chain":4677,"rare":4678,"miller":4679,"convention":4680,"aid":4681,"lie":4682,"vehicles":4683,"mobile":4684,"perform":4685,"squad":4686,"wonder":4687,"lying":4688,"crazy":4689,"sword":4690,"##ping":4691,"attempted":4692,"centuries":4693,"weren":4694,"philosophy":4695,"category":4696,"##ize":4697,"anna":4698,"interested":4699,"47":4700,"sweden":4701,"wolf":4702,"frequently":4703,"abandoned":4704,"kg":4705,"literary":4706,"alliance":4707,"task":4708,"entitled":4709,"##ay":4710,"threw":4711,"promotion":4712,"factory":4713,"tiny":4714,"soccer":4715,"visited":4716,"matt":4717,"fm":4718,"achieved":4719,"52":4720,"defence":4721,"internal":4722,"persian":4723,"43":4724,"methods":4725,"##ging":4726,"arrested":4727,"otherwise":4728,"cambridge":4729,"programming":4730,"villages":4731,"elementary":4732,"districts":4733,"rooms":4734,"criminal":4735,"conflict":4736,"worry":4737,"trained":4738,"1931":4739,"attempts":4740,"waited":4741,"signal":4742,"bird":4743,"truck":4744,"subsequent":4745,"programme":4746,"##ol":4747,"ad":4748,"49":4749,"communist":4750,"details":4751,"faith":4752,"sector":4753,"patrick":4754,"carrying":4755,"laugh":4756,"##ss":4757,"controlled":4758,"korean":4759,"showing":4760,"origin":4761,"fuel":4762,"evil":4763,"1927":4764,"##ent":4765,"brief":4766,"identity":4767,"darkness":4768,"address":4769,"pool":4770,"missed":4771,"publication":4772,"web":4773,"planet":4774,"ian":4775,"anne":4776,"wings":4777,"invited":4778,"##tt":4779,"briefly":4780,"standards":4781,"kissed":4782,"##be":4783,"ideas":4784,"clim
ate":4785,"causing":4786,"walter":4787,"worse":4788,"albert":4789,"articles":4790,"winners":4791,"desire":4792,"aged":4793,"northeast":4794,"dangerous":4795,"gate":4796,"doubt":4797,"1922":4798,"wooden":4799,"multi":4800,"##ky":4801,"poet":4802,"rising":4803,"funding":4804,"46":4805,"communications":4806,"communication":4807,"violence":4808,"copies":4809,"prepared":4810,"ford":4811,"investigation":4812,"skills":4813,"1924":4814,"pulling":4815,"electronic":4816,"##ak":4817,"##ial":4818,"##han":4819,"containing":4820,"ultimately":4821,"offices":4822,"singing":4823,"understanding":4824,"restaurant":4825,"tomorrow":4826,"fashion":4827,"christ":4828,"ward":4829,"da":4830,"pope":4831,"stands":4832,"5th":4833,"flow":4834,"studios":4835,"aired":4836,"commissioned":4837,"contained":4838,"exist":4839,"fresh":4840,"americans":4841,"##per":4842,"wrestling":4843,"approved":4844,"kid":4845,"employed":4846,"respect":4847,"suit":4848,"1925":4849,"angel":4850,"asking":4851,"increasing":4852,"frame":4853,"angry":4854,"selling":4855,"1950s":4856,"thin":4857,"finds":4858,"##nd":4859,"temperature":4860,"statement":4861,"ali":4862,"explain":4863,"inhabitants":4864,"towns":4865,"extensive":4866,"narrow":4867,"51":4868,"jane":4869,"flowers":4870,"images":4871,"promise":4872,"somewhere":4873,"object":4874,"fly":4875,"closely":4876,"##ls":4877,"1912":4878,"bureau":4879,"cape":4880,"1926":4881,"weekly":4882,"presidential":4883,"legislative":4884,"1921":4885,"##ai":4886,"##au":4887,"launch":4888,"founding":4889,"##ny":4890,"978":4891,"##ring":4892,"artillery":4893,"strike":4894,"un":4895,"institutions":4896,"roll":4897,"writers":4898,"landing":4899,"chose":4900,"kevin":4901,"anymore":4902,"pp":4903,"##ut":4904,"attorney":4905,"fit":4906,"dan":4907,"billboard":4908,"receiving":4909,"agricultural":4910,"breaking":4911,"sought":4912,"dave":4913,"admitted":4914,"lands":4915,"mexican":4916,"##bury":4917,"charlie":4918,"specifically":4919,"hole":4920,"iv":4921,"howard":4922,"credit":4923,"moscow":4924,"roads":4925,"accident":4926,"1923":4927,"proved":4928,"wear":4929,"struck":4930,"hey":4931,"guards":4932,"stuff":4933,"slid":4934,"expansion":4935,"1915":4936,"cat":4937,"anthony":4938,"##kin":4939,"melbourne":4940,"opposed":4941,"sub":4942,"southwest":4943,"architect":4944,"failure":4945,"plane":4946,"1916":4947,"##ron":4948,"map":4949,"camera":4950,"tank":4951,"listen":4952,"regarding":4953,"wet":4954,"introduction":4955,"metropolitan":4956,"link":4957,"ep":4958,"fighter":4959,"inch":4960,"grown":4961,"gene":4962,"anger":4963,"fixed":4964,"buy":4965,"dvd":4966,"khan":4967,"domestic":4968,"worldwide":4969,"chapel":4970,"mill":4971,"functions":4972,"examples":4973,"##head":4974,"developing":4975,"1910":4976,"turkey":4977,"hits":4978,"pocket":4979,"antonio":4980,"papers":4981,"grow":4982,"unless":4983,"circuit":4984,"18th":4985,"concerned":4986,"attached":4987,"journalist":4988,"selection":4989,"journey":4990,"converted":4991,"provincial":4992,"painted":4993,"hearing":4994,"aren":4995,"bands":4996,"negative":4997,"aside":4998,"wondered":4999,"knight":5000,"lap":5001,"survey":5002,"ma":5003,"##ow":5004,"noise":5005,"billy":5006,"##ium":5007,"shooting":5008,"guide":5009,"bedroom":5010,"priest":5011,"resistance":5012,"motor":5013,"homes":5014,"sounded":5015,"giant":5016,"##mer":5017,"150":5018,"scenes":5019,"equal":5020,"comic":5021,"patients":5022,"hidden":5023,"solid":5024,"actual":5025,"bringing":5026,"afternoon":5027,"touched":5028,"funds":5029,"wedding":5030,"consisted":5031,"marie":5032,"canal":5033,"sr":5034,"kim":5035,"tr
eaty":5036,"turkish":5037,"recognition":5038,"residence":5039,"cathedral":5040,"broad":5041,"knees":5042,"incident":5043,"shaped":5044,"fired":5045,"norwegian":5046,"handle":5047,"cheek":5048,"contest":5049,"represent":5050,"##pe":5051,"representing":5052,"beauty":5053,"##sen":5054,"birds":5055,"advantage":5056,"emergency":5057,"wrapped":5058,"drawing":5059,"notice":5060,"pink":5061,"broadcasting":5062,"##ong":5063,"somehow":5064,"bachelor":5065,"seventh":5066,"collected":5067,"registered":5068,"establishment":5069,"alan":5070,"assumed":5071,"chemical":5072,"personnel":5073,"roger":5074,"retirement":5075,"jeff":5076,"portuguese":5077,"wore":5078,"tied":5079,"device":5080,"threat":5081,"progress":5082,"advance":5083,"##ised":5084,"banks":5085,"hired":5086,"manchester":5087,"nfl":5088,"teachers":5089,"structures":5090,"forever":5091,"##bo":5092,"tennis":5093,"helping":5094,"saturday":5095,"sale":5096,"applications":5097,"junction":5098,"hip":5099,"incorporated":5100,"neighborhood":5101,"dressed":5102,"ceremony":5103,"##ds":5104,"influenced":5105,"hers":5106,"visual":5107,"stairs":5108,"decades":5109,"inner":5110,"kansas":5111,"hung":5112,"hoped":5113,"gain":5114,"scheduled":5115,"downtown":5116,"engaged":5117,"austria":5118,"clock":5119,"norway":5120,"certainly":5121,"pale":5122,"protected":5123,"1913":5124,"victor":5125,"employees":5126,"plate":5127,"putting":5128,"surrounded":5129,"##ists":5130,"finishing":5131,"blues":5132,"tropical":5133,"##ries":5134,"minnesota":5135,"consider":5136,"philippines":5137,"accept":5138,"54":5139,"retrieved":5140,"1900":5141,"concern":5142,"anderson":5143,"properties":5144,"institution":5145,"gordon":5146,"successfully":5147,"vietnam":5148,"##dy":5149,"backing":5150,"outstanding":5151,"muslim":5152,"crossing":5153,"folk":5154,"producing":5155,"usual":5156,"demand":5157,"occurs":5158,"observed":5159,"lawyer":5160,"educated":5161,"##ana":5162,"kelly":5163,"string":5164,"pleasure":5165,"budget":5166,"items":5167,"quietly":5168,"colorado":5169,"philip":5170,"typical":5171,"##worth":5172,"derived":5173,"600":5174,"survived":5175,"asks":5176,"mental":5177,"##ide":5178,"56":5179,"jake":5180,"jews":5181,"distinguished":5182,"ltd":5183,"1911":5184,"sri":5185,"extremely":5186,"53":5187,"athletic":5188,"loud":5189,"thousands":5190,"worried":5191,"shadow":5192,"transportation":5193,"horses":5194,"weapon":5195,"arena":5196,"importance":5197,"users":5198,"tim":5199,"objects":5200,"contributed":5201,"dragon":5202,"douglas":5203,"aware":5204,"senator":5205,"johnny":5206,"jordan":5207,"sisters":5208,"engines":5209,"flag":5210,"investment":5211,"samuel":5212,"shock":5213,"capable":5214,"clark":5215,"row":5216,"wheel":5217,"refers":5218,"session":5219,"familiar":5220,"biggest":5221,"wins":5222,"hate":5223,"maintained":5224,"drove":5225,"hamilton":5226,"request":5227,"expressed":5228,"injured":5229,"underground":5230,"churches":5231,"walker":5232,"wars":5233,"tunnel":5234,"passes":5235,"stupid":5236,"agriculture":5237,"softly":5238,"cabinet":5239,"regarded":5240,"joining":5241,"indiana":5242,"##ea":5243,"##ms":5244,"push":5245,"dates":5246,"spend":5247,"behavior":5248,"woods":5249,"protein":5250,"gently":5251,"chase":5252,"morgan":5253,"mention":5254,"burning":5255,"wake":5256,"combination":5257,"occur":5258,"mirror":5259,"leads":5260,"jimmy":5261,"indeed":5262,"impossible":5263,"singapore":5264,"paintings":5265,"covering":5266,"##nes":5267,"soldier":5268,"locations":5269,"attendance":5270,"sell":5271,"historian":5272,"wisconsin":5273,"invasion":5274,"argued":5275,"painter":5276
,"diego":5277,"changing":5278,"egypt":5279,"##don":5280,"experienced":5281,"inches":5282,"##ku":5283,"missouri":5284,"vol":5285,"grounds":5286,"spoken":5287,"switzerland":5288,"##gan":5289,"reform":5290,"rolling":5291,"ha":5292,"forget":5293,"massive":5294,"resigned":5295,"burned":5296,"allen":5297,"tennessee":5298,"locked":5299,"values":5300,"improved":5301,"##mo":5302,"wounded":5303,"universe":5304,"sick":5305,"dating":5306,"facing":5307,"pack":5308,"purchase":5309,"user":5310,"##pur":5311,"moments":5312,"##ul":5313,"merged":5314,"anniversary":5315,"1908":5316,"coal":5317,"brick":5318,"understood":5319,"causes":5320,"dynasty":5321,"queensland":5322,"establish":5323,"stores":5324,"crisis":5325,"promote":5326,"hoping":5327,"views":5328,"cards":5329,"referee":5330,"extension":5331,"##si":5332,"raise":5333,"arizona":5334,"improve":5335,"colonial":5336,"formal":5337,"charged":5338,"##rt":5339,"palm":5340,"lucky":5341,"hide":5342,"rescue":5343,"faces":5344,"95":5345,"feelings":5346,"candidates":5347,"juan":5348,"##ell":5349,"goods":5350,"6th":5351,"courses":5352,"weekend":5353,"59":5354,"luke":5355,"cash":5356,"fallen":5357,"##om":5358,"delivered":5359,"affected":5360,"installed":5361,"carefully":5362,"tries":5363,"swiss":5364,"hollywood":5365,"costs":5366,"lincoln":5367,"responsibility":5368,"##he":5369,"shore":5370,"file":5371,"proper":5372,"normally":5373,"maryland":5374,"assistance":5375,"jump":5376,"constant":5377,"offering":5378,"friendly":5379,"waters":5380,"persons":5381,"realize":5382,"contain":5383,"trophy":5384,"800":5385,"partnership":5386,"factor":5387,"58":5388,"musicians":5389,"cry":5390,"bound":5391,"oregon":5392,"indicated":5393,"hero":5394,"houston":5395,"medium":5396,"##ure":5397,"consisting":5398,"somewhat":5399,"##ara":5400,"57":5401,"cycle":5402,"##che":5403,"beer":5404,"moore":5405,"frederick":5406,"gotten":5407,"eleven":5408,"worst":5409,"weak":5410,"approached":5411,"arranged":5412,"chin":5413,"loan":5414,"universal":5415,"bond":5416,"fifteen":5417,"pattern":5418,"disappeared":5419,"##ney":5420,"translated":5421,"##zed":5422,"lip":5423,"arab":5424,"capture":5425,"interests":5426,"insurance":5427,"##chi":5428,"shifted":5429,"cave":5430,"prix":5431,"warning":5432,"sections":5433,"courts":5434,"coat":5435,"plot":5436,"smell":5437,"feed":5438,"golf":5439,"favorite":5440,"maintain":5441,"knife":5442,"vs":5443,"voted":5444,"degrees":5445,"finance":5446,"quebec":5447,"opinion":5448,"translation":5449,"manner":5450,"ruled":5451,"operate":5452,"productions":5453,"choose":5454,"musician":5455,"discovery":5456,"confused":5457,"tired":5458,"separated":5459,"stream":5460,"techniques":5461,"committed":5462,"attend":5463,"ranking":5464,"kings":5465,"throw":5466,"passengers":5467,"measure":5468,"horror":5469,"fan":5470,"mining":5471,"sand":5472,"danger":5473,"salt":5474,"calm":5475,"decade":5476,"dam":5477,"require":5478,"runner":5479,"##ik":5480,"rush":5481,"associate":5482,"greece":5483,"##ker":5484,"rivers":5485,"consecutive":5486,"matthew":5487,"##ski":5488,"sighed":5489,"sq":5490,"documents":5491,"steam":5492,"edited":5493,"closing":5494,"tie":5495,"accused":5496,"1905":5497,"##ini":5498,"islamic":5499,"distributed":5500,"directors":5501,"organisation":5502,"bruce":5503,"7th":5504,"breathing":5505,"mad":5506,"lit":5507,"arrival":5508,"concrete":5509,"taste":5510,"08":5511,"composition":5512,"shaking":5513,"faster":5514,"amateur":5515,"adjacent":5516,"stating":5517,"1906":5518,"twin":5519,"flew":5520,"##ran":5521,"tokyo":5522,"publications":5523,"##tone":5524,"obviously":5525,"ridg
e":5526,"storage":5527,"1907":5528,"carl":5529,"pages":5530,"concluded":5531,"desert":5532,"driven":5533,"universities":5534,"ages":5535,"terminal":5536,"sequence":5537,"borough":5538,"250":5539,"constituency":5540,"creative":5541,"cousin":5542,"economics":5543,"dreams":5544,"margaret":5545,"notably":5546,"reduce":5547,"montreal":5548,"mode":5549,"17th":5550,"ears":5551,"saved":5552,"jan":5553,"vocal":5554,"##ica":5555,"1909":5556,"andy":5557,"##jo":5558,"riding":5559,"roughly":5560,"threatened":5561,"##ise":5562,"meters":5563,"meanwhile":5564,"landed":5565,"compete":5566,"repeated":5567,"grass":5568,"czech":5569,"regularly":5570,"charges":5571,"tea":5572,"sudden":5573,"appeal":5574,"##ung":5575,"solution":5576,"describes":5577,"pierre":5578,"classification":5579,"glad":5580,"parking":5581,"##ning":5582,"belt":5583,"physics":5584,"99":5585,"rachel":5586,"add":5587,"hungarian":5588,"participate":5589,"expedition":5590,"damaged":5591,"gift":5592,"childhood":5593,"85":5594,"fifty":5595,"##red":5596,"mathematics":5597,"jumped":5598,"letting":5599,"defensive":5600,"mph":5601,"##ux":5602,"##gh":5603,"testing":5604,"##hip":5605,"hundreds":5606,"shoot":5607,"owners":5608,"matters":5609,"smoke":5610,"israeli":5611,"kentucky":5612,"dancing":5613,"mounted":5614,"grandfather":5615,"emma":5616,"designs":5617,"profit":5618,"argentina":5619,"##gs":5620,"truly":5621,"li":5622,"lawrence":5623,"cole":5624,"begun":5625,"detroit":5626,"willing":5627,"branches":5628,"smiling":5629,"decide":5630,"miami":5631,"enjoyed":5632,"recordings":5633,"##dale":5634,"poverty":5635,"ethnic":5636,"gay":5637,"##bi":5638,"gary":5639,"arabic":5640,"09":5641,"accompanied":5642,"##one":5643,"##ons":5644,"fishing":5645,"determine":5646,"residential":5647,"acid":5648,"##ary":5649,"alice":5650,"returns":5651,"starred":5652,"mail":5653,"##ang":5654,"jonathan":5655,"strategy":5656,"##ue":5657,"net":5658,"forty":5659,"cook":5660,"businesses":5661,"equivalent":5662,"commonwealth":5663,"distinct":5664,"ill":5665,"##cy":5666,"seriously":5667,"##ors":5668,"##ped":5669,"shift":5670,"harris":5671,"replace":5672,"rio":5673,"imagine":5674,"formula":5675,"ensure":5676,"##ber":5677,"additionally":5678,"scheme":5679,"conservation":5680,"occasionally":5681,"purposes":5682,"feels":5683,"favor":5684,"##and":5685,"##ore":5686,"1930s":5687,"contrast":5688,"hanging":5689,"hunt":5690,"movies":5691,"1904":5692,"instruments":5693,"victims":5694,"danish":5695,"christopher":5696,"busy":5697,"demon":5698,"sugar":5699,"earliest":5700,"colony":5701,"studying":5702,"balance":5703,"duties":5704,"##ks":5705,"belgium":5706,"slipped":5707,"carter":5708,"05":5709,"visible":5710,"stages":5711,"iraq":5712,"fifa":5713,"##im":5714,"commune":5715,"forming":5716,"zero":5717,"07":5718,"continuing":5719,"talked":5720,"counties":5721,"legend":5722,"bathroom":5723,"option":5724,"tail":5725,"clay":5726,"daughters":5727,"afterwards":5728,"severe":5729,"jaw":5730,"visitors":5731,"##ded":5732,"devices":5733,"aviation":5734,"russell":5735,"kate":5736,"##vi":5737,"entering":5738,"subjects":5739,"##ino":5740,"temporary":5741,"swimming":5742,"forth":5743,"smooth":5744,"ghost":5745,"audio":5746,"bush":5747,"operates":5748,"rocks":5749,"movements":5750,"signs":5751,"eddie":5752,"##tz":5753,"ann":5754,"voices":5755,"honorary":5756,"06":5757,"memories":5758,"dallas":5759,"pure":5760,"measures":5761,"racial":5762,"promised":5763,"66":5764,"harvard":5765,"ceo":5766,"16th":5767,"parliamentary":5768,"indicate":5769,"benefit":5770,"flesh":5771,"dublin":5772,"louisiana":5773,"1902":5774,"1901":
5775,"patient":5776,"sleeping":5777,"1903":5778,"membership":5779,"coastal":5780,"medieval":5781,"wanting":5782,"element":5783,"scholars":5784,"rice":5785,"62":5786,"limit":5787,"survive":5788,"makeup":5789,"rating":5790,"definitely":5791,"collaboration":5792,"obvious":5793,"##tan":5794,"boss":5795,"ms":5796,"baron":5797,"birthday":5798,"linked":5799,"soil":5800,"diocese":5801,"##lan":5802,"ncaa":5803,"##mann":5804,"offensive":5805,"shell":5806,"shouldn":5807,"waist":5808,"##tus":5809,"plain":5810,"ross":5811,"organ":5812,"resolution":5813,"manufacturing":5814,"adding":5815,"relative":5816,"kennedy":5817,"98":5818,"whilst":5819,"moth":5820,"marketing":5821,"gardens":5822,"crash":5823,"72":5824,"heading":5825,"partners":5826,"credited":5827,"carlos":5828,"moves":5829,"cable":5830,"##zi":5831,"marshall":5832,"##out":5833,"depending":5834,"bottle":5835,"represents":5836,"rejected":5837,"responded":5838,"existed":5839,"04":5840,"jobs":5841,"denmark":5842,"lock":5843,"##ating":5844,"treated":5845,"graham":5846,"routes":5847,"talent":5848,"commissioner":5849,"drugs":5850,"secure":5851,"tests":5852,"reign":5853,"restored":5854,"photography":5855,"##gi":5856,"contributions":5857,"oklahoma":5858,"designer":5859,"disc":5860,"grin":5861,"seattle":5862,"robin":5863,"paused":5864,"atlanta":5865,"unusual":5866,"##gate":5867,"praised":5868,"las":5869,"laughing":5870,"satellite":5871,"hungary":5872,"visiting":5873,"##sky":5874,"interesting":5875,"factors":5876,"deck":5877,"poems":5878,"norman":5879,"##water":5880,"stuck":5881,"speaker":5882,"rifle":5883,"domain":5884,"premiered":5885,"##her":5886,"dc":5887,"comics":5888,"actors":5889,"01":5890,"reputation":5891,"eliminated":5892,"8th":5893,"ceiling":5894,"prisoners":5895,"script":5896,"##nce":5897,"leather":5898,"austin":5899,"mississippi":5900,"rapidly":5901,"admiral":5902,"parallel":5903,"charlotte":5904,"guilty":5905,"tools":5906,"gender":5907,"divisions":5908,"fruit":5909,"##bs":5910,"laboratory":5911,"nelson":5912,"fantasy":5913,"marry":5914,"rapid":5915,"aunt":5916,"tribe":5917,"requirements":5918,"aspects":5919,"suicide":5920,"amongst":5921,"adams":5922,"bone":5923,"ukraine":5924,"abc":5925,"kick":5926,"sees":5927,"edinburgh":5928,"clothing":5929,"column":5930,"rough":5931,"gods":5932,"hunting":5933,"broadway":5934,"gathered":5935,"concerns":5936,"##ek":5937,"spending":5938,"ty":5939,"12th":5940,"snapped":5941,"requires":5942,"solar":5943,"bones":5944,"cavalry":5945,"##tta":5946,"iowa":5947,"drinking":5948,"waste":5949,"index":5950,"franklin":5951,"charity":5952,"thompson":5953,"stewart":5954,"tip":5955,"flash":5956,"landscape":5957,"friday":5958,"enjoy":5959,"singh":5960,"poem":5961,"listening":5962,"##back":5963,"eighth":5964,"fred":5965,"differences":5966,"adapted":5967,"bomb":5968,"ukrainian":5969,"surgery":5970,"corporate":5971,"masters":5972,"anywhere":5973,"##more":5974,"waves":5975,"odd":5976,"sean":5977,"portugal":5978,"orleans":5979,"dick":5980,"debate":5981,"kent":5982,"eating":5983,"puerto":5984,"cleared":5985,"96":5986,"expect":5987,"cinema":5988,"97":5989,"guitarist":5990,"blocks":5991,"electrical":5992,"agree":5993,"involving":5994,"depth":5995,"dying":5996,"panel":5997,"struggle":5998,"##ged":5999,"peninsula":6000,"adults":6001,"novels":6002,"emerged":6003,"vienna":6004,"metro":6005,"debuted":6006,"shoes":6007,"tamil":6008,"songwriter":6009,"meets":6010,"prove":6011,"beating":6012,"instance":6013,"heaven":6014,"scared":6015,"sending":6016,"marks":6017,"artistic":6018,"passage":6019,"superior":6020,"03":6021,"significantly":6022,"shop
ping":6023,"##tive":6024,"retained":6025,"##izing":6026,"malaysia":6027,"technique":6028,"cheeks":6029,"##ola":6030,"warren":6031,"maintenance":6032,"destroy":6033,"extreme":6034,"allied":6035,"120":6036,"appearing":6037,"##yn":6038,"fill":6039,"advice":6040,"alabama":6041,"qualifying":6042,"policies":6043,"cleveland":6044,"hat":6045,"battery":6046,"smart":6047,"authors":6048,"10th":6049,"soundtrack":6050,"acted":6051,"dated":6052,"lb":6053,"glance":6054,"equipped":6055,"coalition":6056,"funny":6057,"outer":6058,"ambassador":6059,"roy":6060,"possibility":6061,"couples":6062,"campbell":6063,"dna":6064,"loose":6065,"ethan":6066,"supplies":6067,"1898":6068,"gonna":6069,"88":6070,"monster":6071,"##res":6072,"shake":6073,"agents":6074,"frequency":6075,"springs":6076,"dogs":6077,"practices":6078,"61":6079,"gang":6080,"plastic":6081,"easier":6082,"suggests":6083,"gulf":6084,"blade":6085,"exposed":6086,"colors":6087,"industries":6088,"markets":6089,"pan":6090,"nervous":6091,"electoral":6092,"charts":6093,"legislation":6094,"ownership":6095,"##idae":6096,"mac":6097,"appointment":6098,"shield":6099,"copy":6100,"assault":6101,"socialist":6102,"abbey":6103,"monument":6104,"license":6105,"throne":6106,"employment":6107,"jay":6108,"93":6109,"replacement":6110,"charter":6111,"cloud":6112,"powered":6113,"suffering":6114,"accounts":6115,"oak":6116,"connecticut":6117,"strongly":6118,"wright":6119,"colour":6120,"crystal":6121,"13th":6122,"context":6123,"welsh":6124,"networks":6125,"voiced":6126,"gabriel":6127,"jerry":6128,"##cing":6129,"forehead":6130,"mp":6131,"##ens":6132,"manage":6133,"schedule":6134,"totally":6135,"remix":6136,"##ii":6137,"forests":6138,"occupation":6139,"print":6140,"nicholas":6141,"brazilian":6142,"strategic":6143,"vampires":6144,"engineers":6145,"76":6146,"roots":6147,"seek":6148,"correct":6149,"instrumental":6150,"und":6151,"alfred":6152,"backed":6153,"hop":6154,"##des":6155,"stanley":6156,"robinson":6157,"traveled":6158,"wayne":6159,"welcome":6160,"austrian":6161,"achieve":6162,"67":6163,"exit":6164,"rates":6165,"1899":6166,"strip":6167,"whereas":6168,"##cs":6169,"sing":6170,"deeply":6171,"adventure":6172,"bobby":6173,"rick":6174,"jamie":6175,"careful":6176,"components":6177,"cap":6178,"useful":6179,"personality":6180,"knee":6181,"##shi":6182,"pushing":6183,"hosts":6184,"02":6185,"protest":6186,"ca":6187,"ottoman":6188,"symphony":6189,"##sis":6190,"63":6191,"boundary":6192,"1890":6193,"processes":6194,"considering":6195,"considerable":6196,"tons":6197,"##work":6198,"##ft":6199,"##nia":6200,"cooper":6201,"trading":6202,"dear":6203,"conduct":6204,"91":6205,"illegal":6206,"apple":6207,"revolutionary":6208,"holiday":6209,"definition":6210,"harder":6211,"##van":6212,"jacob":6213,"circumstances":6214,"destruction":6215,"##lle":6216,"popularity":6217,"grip":6218,"classified":6219,"liverpool":6220,"donald":6221,"baltimore":6222,"flows":6223,"seeking":6224,"honour":6225,"approval":6226,"92":6227,"mechanical":6228,"till":6229,"happening":6230,"statue":6231,"critic":6232,"increasingly":6233,"immediate":6234,"describe":6235,"commerce":6236,"stare":6237,"##ster":6238,"indonesia":6239,"meat":6240,"rounds":6241,"boats":6242,"baker":6243,"orthodox":6244,"depression":6245,"formally":6246,"worn":6247,"naked":6248,"claire":6249,"muttered":6250,"sentence":6251,"11th":6252,"emily":6253,"document":6254,"77":6255,"criticism":6256,"wished":6257,"vessel":6258,"spiritual":6259,"bent":6260,"virgin":6261,"parker":6262,"minimum":6263,"murray":6264,"lunch":6265,"danny":6266,"printed":6267,"compilation":6268,"keyb
oards":6269,"false":6270,"blow":6271,"belonged":6272,"68":6273,"raising":6274,"78":6275,"cutting":6276,"##board":6277,"pittsburgh":6278,"##up":6279,"9th":6280,"shadows":6281,"81":6282,"hated":6283,"indigenous":6284,"jon":6285,"15th":6286,"barry":6287,"scholar":6288,"ah":6289,"##zer":6290,"oliver":6291,"##gy":6292,"stick":6293,"susan":6294,"meetings":6295,"attracted":6296,"spell":6297,"romantic":6298,"##ver":6299,"ye":6300,"1895":6301,"photo":6302,"demanded":6303,"customers":6304,"##ac":6305,"1896":6306,"logan":6307,"revival":6308,"keys":6309,"modified":6310,"commanded":6311,"jeans":6312,"##ious":6313,"upset":6314,"raw":6315,"phil":6316,"detective":6317,"hiding":6318,"resident":6319,"vincent":6320,"##bly":6321,"experiences":6322,"diamond":6323,"defeating":6324,"coverage":6325,"lucas":6326,"external":6327,"parks":6328,"franchise":6329,"helen":6330,"bible":6331,"successor":6332,"percussion":6333,"celebrated":6334,"il":6335,"lift":6336,"profile":6337,"clan":6338,"romania":6339,"##ied":6340,"mills":6341,"##su":6342,"nobody":6343,"achievement":6344,"shrugged":6345,"fault":6346,"1897":6347,"rhythm":6348,"initiative":6349,"breakfast":6350,"carbon":6351,"700":6352,"69":6353,"lasted":6354,"violent":6355,"74":6356,"wound":6357,"ken":6358,"killer":6359,"gradually":6360,"filmed":6361,"°c":6362,"dollars":6363,"processing":6364,"94":6365,"remove":6366,"criticized":6367,"guests":6368,"sang":6369,"chemistry":6370,"##vin":6371,"legislature":6372,"disney":6373,"##bridge":6374,"uniform":6375,"escaped":6376,"integrated":6377,"proposal":6378,"purple":6379,"denied":6380,"liquid":6381,"karl":6382,"influential":6383,"morris":6384,"nights":6385,"stones":6386,"intense":6387,"experimental":6388,"twisted":6389,"71":6390,"84":6391,"##ld":6392,"pace":6393,"nazi":6394,"mitchell":6395,"ny":6396,"blind":6397,"reporter":6398,"newspapers":6399,"14th":6400,"centers":6401,"burn":6402,"basin":6403,"forgotten":6404,"surviving":6405,"filed":6406,"collections":6407,"monastery":6408,"losses":6409,"manual":6410,"couch":6411,"description":6412,"appropriate":6413,"merely":6414,"tag":6415,"missions":6416,"sebastian":6417,"restoration":6418,"replacing":6419,"triple":6420,"73":6421,"elder":6422,"julia":6423,"warriors":6424,"benjamin":6425,"julian":6426,"convinced":6427,"stronger":6428,"amazing":6429,"declined":6430,"versus":6431,"merchant":6432,"happens":6433,"output":6434,"finland":6435,"bare":6436,"barbara":6437,"absence":6438,"ignored":6439,"dawn":6440,"injuries":6441,"##port":6442,"producers":6443,"##ram":6444,"82":6445,"luis":6446,"##ities":6447,"kw":6448,"admit":6449,"expensive":6450,"electricity":6451,"nba":6452,"exception":6453,"symbol":6454,"##ving":6455,"ladies":6456,"shower":6457,"sheriff":6458,"characteristics":6459,"##je":6460,"aimed":6461,"button":6462,"ratio":6463,"effectively":6464,"summit":6465,"angle":6466,"jury":6467,"bears":6468,"foster":6469,"vessels":6470,"pants":6471,"executed":6472,"evans":6473,"dozen":6474,"advertising":6475,"kicked":6476,"patrol":6477,"1889":6478,"competitions":6479,"lifetime":6480,"principles":6481,"athletics":6482,"##logy":6483,"birmingham":6484,"sponsored":6485,"89":6486,"rob":6487,"nomination":6488,"1893":6489,"acoustic":6490,"##sm":6491,"creature":6492,"longest":6493,"##tra":6494,"credits":6495,"harbor":6496,"dust":6497,"josh":6498,"##so":6499,"territories":6500,"milk":6501,"infrastructure":6502,"completion":6503,"thailand":6504,"indians":6505,"leon":6506,"archbishop":6507,"##sy":6508,"assist":6509,"pitch":6510,"blake":6511,"arrangement":6512,"girlfriend":6513,"serbian":6514,"operational":65
15,"hence":6516,"sad":6517,"scent":6518,"fur":6519,"dj":6520,"sessions":6521,"hp":6522,"refer":6523,"rarely":6524,"##ora":6525,"exists":6526,"1892":6527,"##ten":6528,"scientists":6529,"dirty":6530,"penalty":6531,"burst":6532,"portrait":6533,"seed":6534,"79":6535,"pole":6536,"limits":6537,"rival":6538,"1894":6539,"stable":6540,"alpha":6541,"grave":6542,"constitutional":6543,"alcohol":6544,"arrest":6545,"flower":6546,"mystery":6547,"devil":6548,"architectural":6549,"relationships":6550,"greatly":6551,"habitat":6552,"##istic":6553,"larry":6554,"progressive":6555,"remote":6556,"cotton":6557,"##ics":6558,"##ok":6559,"preserved":6560,"reaches":6561,"##ming":6562,"cited":6563,"86":6564,"vast":6565,"scholarship":6566,"decisions":6567,"cbs":6568,"joy":6569,"teach":6570,"1885":6571,"editions":6572,"knocked":6573,"eve":6574,"searching":6575,"partly":6576,"participation":6577,"gap":6578,"animated":6579,"fate":6580,"excellent":6581,"##ett":6582,"na":6583,"87":6584,"alternate":6585,"saints":6586,"youngest":6587,"##ily":6588,"climbed":6589,"##ita":6590,"##tors":6591,"suggest":6592,"##ct":6593,"discussion":6594,"staying":6595,"choir":6596,"lakes":6597,"jacket":6598,"revenue":6599,"nevertheless":6600,"peaked":6601,"instrument":6602,"wondering":6603,"annually":6604,"managing":6605,"neil":6606,"1891":6607,"signing":6608,"terry":6609,"##ice":6610,"apply":6611,"clinical":6612,"brooklyn":6613,"aim":6614,"catherine":6615,"fuck":6616,"farmers":6617,"figured":6618,"ninth":6619,"pride":6620,"hugh":6621,"evolution":6622,"ordinary":6623,"involvement":6624,"comfortable":6625,"shouted":6626,"tech":6627,"encouraged":6628,"taiwan":6629,"representation":6630,"sharing":6631,"##lia":6632,"##em":6633,"panic":6634,"exact":6635,"cargo":6636,"competing":6637,"fat":6638,"cried":6639,"83":6640,"1920s":6641,"occasions":6642,"pa":6643,"cabin":6644,"borders":6645,"utah":6646,"marcus":6647,"##isation":6648,"badly":6649,"muscles":6650,"##ance":6651,"victorian":6652,"transition":6653,"warner":6654,"bet":6655,"permission":6656,"##rin":6657,"slave":6658,"terrible":6659,"similarly":6660,"shares":6661,"seth":6662,"uefa":6663,"possession":6664,"medals":6665,"benefits":6666,"colleges":6667,"lowered":6668,"perfectly":6669,"mall":6670,"transit":6671,"##ye":6672,"##kar":6673,"publisher":6674,"##ened":6675,"harrison":6676,"deaths":6677,"elevation":6678,"##ae":6679,"asleep":6680,"machines":6681,"sigh":6682,"ash":6683,"hardly":6684,"argument":6685,"occasion":6686,"parent":6687,"leo":6688,"decline":6689,"1888":6690,"contribution":6691,"##ua":6692,"concentration":6693,"1000":6694,"opportunities":6695,"hispanic":6696,"guardian":6697,"extent":6698,"emotions":6699,"hips":6700,"mason":6701,"volumes":6702,"bloody":6703,"controversy":6704,"diameter":6705,"steady":6706,"mistake":6707,"phoenix":6708,"identify":6709,"violin":6710,"##sk":6711,"departure":6712,"richmond":6713,"spin":6714,"funeral":6715,"enemies":6716,"1864":6717,"gear":6718,"literally":6719,"connor":6720,"random":6721,"sergeant":6722,"grab":6723,"confusion":6724,"1865":6725,"transmission":6726,"informed":6727,"op":6728,"leaning":6729,"sacred":6730,"suspended":6731,"thinks":6732,"gates":6733,"portland":6734,"luck":6735,"agencies":6736,"yours":6737,"hull":6738,"expert":6739,"muscle":6740,"layer":6741,"practical":6742,"sculpture":6743,"jerusalem":6744,"latest":6745,"lloyd":6746,"statistics":6747,"deeper":6748,"recommended":6749,"warrior":6750,"arkansas":6751,"mess":6752,"supports":6753,"greg":6754,"eagle":6755,"1880":6756,"recovered":6757,"rated":6758,"concerts":6759,"rushed":6760,"##ano":6761,"st
ops":6762,"eggs":6763,"files":6764,"premiere":6765,"keith":6766,"##vo":6767,"delhi":6768,"turner":6769,"pit":6770,"affair":6771,"belief":6772,"paint":6773,"##zing":6774,"mate":6775,"##ach":6776,"##ev":6777,"victim":6778,"##ology":6779,"withdrew":6780,"bonus":6781,"styles":6782,"fled":6783,"##ud":6784,"glasgow":6785,"technologies":6786,"funded":6787,"nbc":6788,"adaptation":6789,"##ata":6790,"portrayed":6791,"cooperation":6792,"supporters":6793,"judges":6794,"bernard":6795,"justin":6796,"hallway":6797,"ralph":6798,"##ick":6799,"graduating":6800,"controversial":6801,"distant":6802,"continental":6803,"spider":6804,"bite":6805,"##ho":6806,"recognize":6807,"intention":6808,"mixing":6809,"##ese":6810,"egyptian":6811,"bow":6812,"tourism":6813,"suppose":6814,"claiming":6815,"tiger":6816,"dominated":6817,"participants":6818,"vi":6819,"##ru":6820,"nurse":6821,"partially":6822,"tape":6823,"##rum":6824,"psychology":6825,"##rn":6826,"essential":6827,"touring":6828,"duo":6829,"voting":6830,"civilian":6831,"emotional":6832,"channels":6833,"##king":6834,"apparent":6835,"hebrew":6836,"1887":6837,"tommy":6838,"carrier":6839,"intersection":6840,"beast":6841,"hudson":6842,"##gar":6843,"##zo":6844,"lab":6845,"nova":6846,"bench":6847,"discuss":6848,"costa":6849,"##ered":6850,"detailed":6851,"behalf":6852,"drivers":6853,"unfortunately":6854,"obtain":6855,"##lis":6856,"rocky":6857,"##dae":6858,"siege":6859,"friendship":6860,"honey":6861,"##rian":6862,"1861":6863,"amy":6864,"hang":6865,"posted":6866,"governments":6867,"collins":6868,"respond":6869,"wildlife":6870,"preferred":6871,"operator":6872,"##po":6873,"laura":6874,"pregnant":6875,"videos":6876,"dennis":6877,"suspected":6878,"boots":6879,"instantly":6880,"weird":6881,"automatic":6882,"businessman":6883,"alleged":6884,"placing":6885,"throwing":6886,"ph":6887,"mood":6888,"1862":6889,"perry":6890,"venue":6891,"jet":6892,"remainder":6893,"##lli":6894,"##ci":6895,"passion":6896,"biological":6897,"boyfriend":6898,"1863":6899,"dirt":6900,"buffalo":6901,"ron":6902,"segment":6903,"fa":6904,"abuse":6905,"##era":6906,"genre":6907,"thrown":6908,"stroke":6909,"colored":6910,"stress":6911,"exercise":6912,"displayed":6913,"##gen":6914,"struggled":6915,"##tti":6916,"abroad":6917,"dramatic":6918,"wonderful":6919,"thereafter":6920,"madrid":6921,"component":6922,"widespread":6923,"##sed":6924,"tale":6925,"citizen":6926,"todd":6927,"monday":6928,"1886":6929,"vancouver":6930,"overseas":6931,"forcing":6932,"crying":6933,"descent":6934,"##ris":6935,"discussed":6936,"substantial":6937,"ranks":6938,"regime":6939,"1870":6940,"provinces":6941,"switch":6942,"drum":6943,"zane":6944,"ted":6945,"tribes":6946,"proof":6947,"lp":6948,"cream":6949,"researchers":6950,"volunteer":6951,"manor":6952,"silk":6953,"milan":6954,"donated":6955,"allies":6956,"venture":6957,"principle":6958,"delivery":6959,"enterprise":6960,"##ves":6961,"##ans":6962,"bars":6963,"traditionally":6964,"witch":6965,"reminded":6966,"copper":6967,"##uk":6968,"pete":6969,"inter":6970,"links":6971,"colin":6972,"grinned":6973,"elsewhere":6974,"competitive":6975,"frequent":6976,"##oy":6977,"scream":6978,"##hu":6979,"tension":6980,"texts":6981,"submarine":6982,"finnish":6983,"defending":6984,"defend":6985,"pat":6986,"detail":6987,"1884":6988,"affiliated":6989,"stuart":6990,"themes":6991,"villa":6992,"periods":6993,"tool":6994,"belgian":6995,"ruling":6996,"crimes":6997,"answers":6998,"folded":6999,"licensed":7000,"resort":7001,"demolished":7002,"hans":7003,"lucy":7004,"1881":7005,"lion":7006,"traded":7007,"photographs":7008,"writes":7
009,"craig":7010,"##fa":7011,"trials":7012,"generated":7013,"beth":7014,"noble":7015,"debt":7016,"percentage":7017,"yorkshire":7018,"erected":7019,"ss":7020,"viewed":7021,"grades":7022,"confidence":7023,"ceased":7024,"islam":7025,"telephone":7026,"retail":7027,"##ible":7028,"chile":7029,"m²":7030,"roberts":7031,"sixteen":7032,"##ich":7033,"commented":7034,"hampshire":7035,"innocent":7036,"dual":7037,"pounds":7038,"checked":7039,"regulations":7040,"afghanistan":7041,"sung":7042,"rico":7043,"liberty":7044,"assets":7045,"bigger":7046,"options":7047,"angels":7048,"relegated":7049,"tribute":7050,"wells":7051,"attending":7052,"leaf":7053,"##yan":7054,"butler":7055,"romanian":7056,"forum":7057,"monthly":7058,"lisa":7059,"patterns":7060,"gmina":7061,"##tory":7062,"madison":7063,"hurricane":7064,"rev":7065,"##ians":7066,"bristol":7067,"##ula":7068,"elite":7069,"valuable":7070,"disaster":7071,"democracy":7072,"awareness":7073,"germans":7074,"freyja":7075,"##ins":7076,"loop":7077,"absolutely":7078,"paying":7079,"populations":7080,"maine":7081,"sole":7082,"prayer":7083,"spencer":7084,"releases":7085,"doorway":7086,"bull":7087,"##ani":7088,"lover":7089,"midnight":7090,"conclusion":7091,"##sson":7092,"thirteen":7093,"lily":7094,"mediterranean":7095,"##lt":7096,"nhl":7097,"proud":7098,"sample":7099,"##hill":7100,"drummer":7101,"guinea":7102,"##ova":7103,"murphy":7104,"climb":7105,"##ston":7106,"instant":7107,"attributed":7108,"horn":7109,"ain":7110,"railways":7111,"steven":7112,"##ao":7113,"autumn":7114,"ferry":7115,"opponent":7116,"root":7117,"traveling":7118,"secured":7119,"corridor":7120,"stretched":7121,"tales":7122,"sheet":7123,"trinity":7124,"cattle":7125,"helps":7126,"indicates":7127,"manhattan":7128,"murdered":7129,"fitted":7130,"1882":7131,"gentle":7132,"grandmother":7133,"mines":7134,"shocked":7135,"vegas":7136,"produces":7137,"##light":7138,"caribbean":7139,"##ou":7140,"belong":7141,"continuous":7142,"desperate":7143,"drunk":7144,"historically":7145,"trio":7146,"waved":7147,"raf":7148,"dealing":7149,"nathan":7150,"bat":7151,"murmured":7152,"interrupted":7153,"residing":7154,"scientist":7155,"pioneer":7156,"harold":7157,"aaron":7158,"##net":7159,"delta":7160,"attempting":7161,"minority":7162,"mini":7163,"believes":7164,"chorus":7165,"tend":7166,"lots":7167,"eyed":7168,"indoor":7169,"load":7170,"shots":7171,"updated":7172,"jail":7173,"##llo":7174,"concerning":7175,"connecting":7176,"wealth":7177,"##ved":7178,"slaves":7179,"arrive":7180,"rangers":7181,"sufficient":7182,"rebuilt":7183,"##wick":7184,"cardinal":7185,"flood":7186,"muhammad":7187,"whenever":7188,"relation":7189,"runners":7190,"moral":7191,"repair":7192,"viewers":7193,"arriving":7194,"revenge":7195,"punk":7196,"assisted":7197,"bath":7198,"fairly":7199,"breathe":7200,"lists":7201,"innings":7202,"illustrated":7203,"whisper":7204,"nearest":7205,"voters":7206,"clinton":7207,"ties":7208,"ultimate":7209,"screamed":7210,"beijing":7211,"lions":7212,"andre":7213,"fictional":7214,"gathering":7215,"comfort":7216,"radar":7217,"suitable":7218,"dismissed":7219,"hms":7220,"ban":7221,"pine":7222,"wrist":7223,"atmosphere":7224,"voivodeship":7225,"bid":7226,"timber":7227,"##ned":7228,"##nan":7229,"giants":7230,"##ane":7231,"cameron":7232,"recovery":7233,"uss":7234,"identical":7235,"categories":7236,"switched":7237,"serbia":7238,"laughter":7239,"noah":7240,"ensemble":7241,"therapy":7242,"peoples":7243,"touching":7244,"##off":7245,"locally":7246,"pearl":7247,"platforms":7248,"everywhere":7249,"ballet":7250,"tables":7251,"lanka":7252,"herbert":7253,"outdoor
":7254,"toured":7255,"derek":7256,"1883":7257,"spaces":7258,"contested":7259,"swept":7260,"1878":7261,"exclusive":7262,"slight":7263,"connections":7264,"##dra":7265,"winds":7266,"prisoner":7267,"collective":7268,"bangladesh":7269,"tube":7270,"publicly":7271,"wealthy":7272,"thai":7273,"##ys":7274,"isolated":7275,"select":7276,"##ric":7277,"insisted":7278,"pen":7279,"fortune":7280,"ticket":7281,"spotted":7282,"reportedly":7283,"animation":7284,"enforcement":7285,"tanks":7286,"110":7287,"decides":7288,"wider":7289,"lowest":7290,"owen":7291,"##time":7292,"nod":7293,"hitting":7294,"##hn":7295,"gregory":7296,"furthermore":7297,"magazines":7298,"fighters":7299,"solutions":7300,"##ery":7301,"pointing":7302,"requested":7303,"peru":7304,"reed":7305,"chancellor":7306,"knights":7307,"mask":7308,"worker":7309,"eldest":7310,"flames":7311,"reduction":7312,"1860":7313,"volunteers":7314,"##tis":7315,"reporting":7316,"##hl":7317,"wire":7318,"advisory":7319,"endemic":7320,"origins":7321,"settlers":7322,"pursue":7323,"knock":7324,"consumer":7325,"1876":7326,"eu":7327,"compound":7328,"creatures":7329,"mansion":7330,"sentenced":7331,"ivan":7332,"deployed":7333,"guitars":7334,"frowned":7335,"involves":7336,"mechanism":7337,"kilometers":7338,"perspective":7339,"shops":7340,"maps":7341,"terminus":7342,"duncan":7343,"alien":7344,"fist":7345,"bridges":7346,"##pers":7347,"heroes":7348,"fed":7349,"derby":7350,"swallowed":7351,"##ros":7352,"patent":7353,"sara":7354,"illness":7355,"characterized":7356,"adventures":7357,"slide":7358,"hawaii":7359,"jurisdiction":7360,"##op":7361,"organised":7362,"##side":7363,"adelaide":7364,"walks":7365,"biology":7366,"se":7367,"##ties":7368,"rogers":7369,"swing":7370,"tightly":7371,"boundaries":7372,"##rie":7373,"prepare":7374,"implementation":7375,"stolen":7376,"##sha":7377,"certified":7378,"colombia":7379,"edwards":7380,"garage":7381,"##mm":7382,"recalled":7383,"##ball":7384,"rage":7385,"harm":7386,"nigeria":7387,"breast":7388,"##ren":7389,"furniture":7390,"pupils":7391,"settle":7392,"##lus":7393,"cuba":7394,"balls":7395,"client":7396,"alaska":7397,"21st":7398,"linear":7399,"thrust":7400,"celebration":7401,"latino":7402,"genetic":7403,"terror":7404,"##cia":7405,"##ening":7406,"lightning":7407,"fee":7408,"witness":7409,"lodge":7410,"establishing":7411,"skull":7412,"##ique":7413,"earning":7414,"hood":7415,"##ei":7416,"rebellion":7417,"wang":7418,"sporting":7419,"warned":7420,"missile":7421,"devoted":7422,"activist":7423,"porch":7424,"worship":7425,"fourteen":7426,"package":7427,"1871":7428,"decorated":7429,"##shire":7430,"housed":7431,"##ock":7432,"chess":7433,"sailed":7434,"doctors":7435,"oscar":7436,"joan":7437,"treat":7438,"garcia":7439,"harbour":7440,"jeremy":7441,"##ire":7442,"traditions":7443,"dominant":7444,"jacques":7445,"##gon":7446,"##wan":7447,"relocated":7448,"1879":7449,"amendment":7450,"sized":7451,"companion":7452,"simultaneously":7453,"volleyball":7454,"spun":7455,"acre":7456,"increases":7457,"stopping":7458,"loves":7459,"belongs":7460,"affect":7461,"drafted":7462,"tossed":7463,"scout":7464,"battles":7465,"1875":7466,"filming":7467,"shoved":7468,"munich":7469,"tenure":7470,"vertical":7471,"romance":7472,"pc":7473,"##cher":7474,"argue":7475,"##ical":7476,"craft":7477,"ranging":7478,"www":7479,"opens":7480,"honest":7481,"tyler":7482,"yesterday":7483,"virtual":7484,"##let":7485,"muslims":7486,"reveal":7487,"snake":7488,"immigrants":7489,"radical":7490,"screaming":7491,"speakers":7492,"firing":7493,"saving":7494,"belonging":7495,"ease":7496,"lighting":7497,"prefecture":7498,"
blame":7499,"farmer":7500,"hungry":7501,"grows":7502,"rubbed":7503,"beam":7504,"sur":7505,"subsidiary":7506,"##cha":7507,"armenian":7508,"sao":7509,"dropping":7510,"conventional":7511,"##fer":7512,"microsoft":7513,"reply":7514,"qualify":7515,"spots":7516,"1867":7517,"sweat":7518,"festivals":7519,"##ken":7520,"immigration":7521,"physician":7522,"discover":7523,"exposure":7524,"sandy":7525,"explanation":7526,"isaac":7527,"implemented":7528,"##fish":7529,"hart":7530,"initiated":7531,"connect":7532,"stakes":7533,"presents":7534,"heights":7535,"householder":7536,"pleased":7537,"tourist":7538,"regardless":7539,"slip":7540,"closest":7541,"##ction":7542,"surely":7543,"sultan":7544,"brings":7545,"riley":7546,"preparation":7547,"aboard":7548,"slammed":7549,"baptist":7550,"experiment":7551,"ongoing":7552,"interstate":7553,"organic":7554,"playoffs":7555,"##ika":7556,"1877":7557,"130":7558,"##tar":7559,"hindu":7560,"error":7561,"tours":7562,"tier":7563,"plenty":7564,"arrangements":7565,"talks":7566,"trapped":7567,"excited":7568,"sank":7569,"ho":7570,"athens":7571,"1872":7572,"denver":7573,"welfare":7574,"suburb":7575,"athletes":7576,"trick":7577,"diverse":7578,"belly":7579,"exclusively":7580,"yelled":7581,"1868":7582,"##med":7583,"conversion":7584,"##ette":7585,"1874":7586,"internationally":7587,"computers":7588,"conductor":7589,"abilities":7590,"sensitive":7591,"hello":7592,"dispute":7593,"measured":7594,"globe":7595,"rocket":7596,"prices":7597,"amsterdam":7598,"flights":7599,"tigers":7600,"inn":7601,"municipalities":7602,"emotion":7603,"references":7604,"3d":7605,"##mus":7606,"explains":7607,"airlines":7608,"manufactured":7609,"pm":7610,"archaeological":7611,"1873":7612,"interpretation":7613,"devon":7614,"comment":7615,"##ites":7616,"settlements":7617,"kissing":7618,"absolute":7619,"improvement":7620,"suite":7621,"impressed":7622,"barcelona":7623,"sullivan":7624,"jefferson":7625,"towers":7626,"jesse":7627,"julie":7628,"##tin":7629,"##lu":7630,"grandson":7631,"hi":7632,"gauge":7633,"regard":7634,"rings":7635,"interviews":7636,"trace":7637,"raymond":7638,"thumb":7639,"departments":7640,"burns":7641,"serial":7642,"bulgarian":7643,"scores":7644,"demonstrated":7645,"##ix":7646,"1866":7647,"kyle":7648,"alberta":7649,"underneath":7650,"romanized":7651,"##ward":7652,"relieved":7653,"acquisition":7654,"phrase":7655,"cliff":7656,"reveals":7657,"han":7658,"cuts":7659,"merger":7660,"custom":7661,"##dar":7662,"nee":7663,"gilbert":7664,"graduation":7665,"##nts":7666,"assessment":7667,"cafe":7668,"difficulty":7669,"demands":7670,"swung":7671,"democrat":7672,"jennifer":7673,"commons":7674,"1940s":7675,"grove":7676,"##yo":7677,"completing":7678,"focuses":7679,"sum":7680,"substitute":7681,"bearing":7682,"stretch":7683,"reception":7684,"##py":7685,"reflected":7686,"essentially":7687,"destination":7688,"pairs":7689,"##ched":7690,"survival":7691,"resource":7692,"##bach":7693,"promoting":7694,"doubles":7695,"messages":7696,"tear":7697,"##down":7698,"##fully":7699,"parade":7700,"florence":7701,"harvey":7702,"incumbent":7703,"partial":7704,"framework":7705,"900":7706,"pedro":7707,"frozen":7708,"procedure":7709,"olivia":7710,"controls":7711,"##mic":7712,"shelter":7713,"personally":7714,"temperatures":7715,"##od":7716,"brisbane":7717,"tested":7718,"sits":7719,"marble":7720,"comprehensive":7721,"oxygen":7722,"leonard":7723,"##kov":7724,"inaugural":7725,"iranian":7726,"referring":7727,"quarters":7728,"attitude":7729,"##ivity":7730,"mainstream":7731,"lined":7732,"mars":7733,"dakota":7734,"norfolk":7735,"unsuccessful":7736,"##°":7
737,"explosion":7738,"helicopter":7739,"congressional":7740,"##sing":7741,"inspector":7742,"bitch":7743,"seal":7744,"departed":7745,"divine":7746,"##ters":7747,"coaching":7748,"examination":7749,"punishment":7750,"manufacturer":7751,"sink":7752,"columns":7753,"unincorporated":7754,"signals":7755,"nevada":7756,"squeezed":7757,"dylan":7758,"dining":7759,"photos":7760,"martial":7761,"manuel":7762,"eighteen":7763,"elevator":7764,"brushed":7765,"plates":7766,"ministers":7767,"ivy":7768,"congregation":7769,"##len":7770,"slept":7771,"specialized":7772,"taxes":7773,"curve":7774,"restricted":7775,"negotiations":7776,"likes":7777,"statistical":7778,"arnold":7779,"inspiration":7780,"execution":7781,"bold":7782,"intermediate":7783,"significance":7784,"margin":7785,"ruler":7786,"wheels":7787,"gothic":7788,"intellectual":7789,"dependent":7790,"listened":7791,"eligible":7792,"buses":7793,"widow":7794,"syria":7795,"earn":7796,"cincinnati":7797,"collapsed":7798,"recipient":7799,"secrets":7800,"accessible":7801,"philippine":7802,"maritime":7803,"goddess":7804,"clerk":7805,"surrender":7806,"breaks":7807,"playoff":7808,"database":7809,"##ified":7810,"##lon":7811,"ideal":7812,"beetle":7813,"aspect":7814,"soap":7815,"regulation":7816,"strings":7817,"expand":7818,"anglo":7819,"shorter":7820,"crosses":7821,"retreat":7822,"tough":7823,"coins":7824,"wallace":7825,"directions":7826,"pressing":7827,"##oon":7828,"shipping":7829,"locomotives":7830,"comparison":7831,"topics":7832,"nephew":7833,"##mes":7834,"distinction":7835,"honors":7836,"travelled":7837,"sierra":7838,"ibn":7839,"##over":7840,"fortress":7841,"sa":7842,"recognised":7843,"carved":7844,"1869":7845,"clients":7846,"##dan":7847,"intent":7848,"##mar":7849,"coaches":7850,"describing":7851,"bread":7852,"##ington":7853,"beaten":7854,"northwestern":7855,"##ona":7856,"merit":7857,"youtube":7858,"collapse":7859,"challenges":7860,"em":7861,"historians":7862,"objective":7863,"submitted":7864,"virus":7865,"attacking":7866,"drake":7867,"assume":7868,"##ere":7869,"diseases":7870,"marc":7871,"stem":7872,"leeds":7873,"##cus":7874,"##ab":7875,"farming":7876,"glasses":7877,"##lock":7878,"visits":7879,"nowhere":7880,"fellowship":7881,"relevant":7882,"carries":7883,"restaurants":7884,"experiments":7885,"101":7886,"constantly":7887,"bases":7888,"targets":7889,"shah":7890,"tenth":7891,"opponents":7892,"verse":7893,"territorial":7894,"##ira":7895,"writings":7896,"corruption":7897,"##hs":7898,"instruction":7899,"inherited":7900,"reverse":7901,"emphasis":7902,"##vic":7903,"employee":7904,"arch":7905,"keeps":7906,"rabbi":7907,"watson":7908,"payment":7909,"uh":7910,"##ala":7911,"nancy":7912,"##tre":7913,"venice":7914,"fastest":7915,"sexy":7916,"banned":7917,"adrian":7918,"properly":7919,"ruth":7920,"touchdown":7921,"dollar":7922,"boards":7923,"metre":7924,"circles":7925,"edges":7926,"favour":7927,"comments":7928,"ok":7929,"travels":7930,"liberation":7931,"scattered":7932,"firmly":7933,"##ular":7934,"holland":7935,"permitted":7936,"diesel":7937,"kenya":7938,"den":7939,"originated":7940,"##ral":7941,"demons":7942,"resumed":7943,"dragged":7944,"rider":7945,"##rus":7946,"servant":7947,"blinked":7948,"extend":7949,"torn":7950,"##ias":7951,"##sey":7952,"input":7953,"meal":7954,"everybody":7955,"cylinder":7956,"kinds":7957,"camps":7958,"##fe":7959,"bullet":7960,"logic":7961,"##wn":7962,"croatian":7963,"evolved":7964,"healthy":7965,"fool":7966,"chocolate":7967,"wise":7968,"preserve":7969,"pradesh":7970,"##ess":7971,"respective":7972,"1850":7973,"##ew":7974,"chicken":7975,"artificial":7976,"g
ross":7977,"corresponding":7978,"convicted":7979,"cage":7980,"caroline":7981,"dialogue":7982,"##dor":7983,"narrative":7984,"stranger":7985,"mario":7986,"br":7987,"christianity":7988,"failing":7989,"trent":7990,"commanding":7991,"buddhist":7992,"1848":7993,"maurice":7994,"focusing":7995,"yale":7996,"bike":7997,"altitude":7998,"##ering":7999,"mouse":8000,"revised":8001,"##sley":8002,"veteran":8003,"##ig":8004,"pulls":8005,"theology":8006,"crashed":8007,"campaigns":8008,"legion":8009,"##ability":8010,"drag":8011,"excellence":8012,"customer":8013,"cancelled":8014,"intensity":8015,"excuse":8016,"##lar":8017,"liga":8018,"participating":8019,"contributing":8020,"printing":8021,"##burn":8022,"variable":8023,"##rk":8024,"curious":8025,"bin":8026,"legacy":8027,"renaissance":8028,"##my":8029,"symptoms":8030,"binding":8031,"vocalist":8032,"dancer":8033,"##nie":8034,"grammar":8035,"gospel":8036,"democrats":8037,"ya":8038,"enters":8039,"sc":8040,"diplomatic":8041,"hitler":8042,"##ser":8043,"clouds":8044,"mathematical":8045,"quit":8046,"defended":8047,"oriented":8048,"##heim":8049,"fundamental":8050,"hardware":8051,"impressive":8052,"equally":8053,"convince":8054,"confederate":8055,"guilt":8056,"chuck":8057,"sliding":8058,"##ware":8059,"magnetic":8060,"narrowed":8061,"petersburg":8062,"bulgaria":8063,"otto":8064,"phd":8065,"skill":8066,"##ama":8067,"reader":8068,"hopes":8069,"pitcher":8070,"reservoir":8071,"hearts":8072,"automatically":8073,"expecting":8074,"mysterious":8075,"bennett":8076,"extensively":8077,"imagined":8078,"seeds":8079,"monitor":8080,"fix":8081,"##ative":8082,"journalism":8083,"struggling":8084,"signature":8085,"ranch":8086,"encounter":8087,"photographer":8088,"observation":8089,"protests":8090,"##pin":8091,"influences":8092,"##hr":8093,"calendar":8094,"##all":8095,"cruz":8096,"croatia":8097,"locomotive":8098,"hughes":8099,"naturally":8100,"shakespeare":8101,"basement":8102,"hook":8103,"uncredited":8104,"faded":8105,"theories":8106,"approaches":8107,"dare":8108,"phillips":8109,"filling":8110,"fury":8111,"obama":8112,"##ain":8113,"efficient":8114,"arc":8115,"deliver":8116,"min":8117,"raid":8118,"breeding":8119,"inducted":8120,"leagues":8121,"efficiency":8122,"axis":8123,"montana":8124,"eagles":8125,"##ked":8126,"supplied":8127,"instructions":8128,"karen":8129,"picking":8130,"indicating":8131,"trap":8132,"anchor":8133,"practically":8134,"christians":8135,"tomb":8136,"vary":8137,"occasional":8138,"electronics":8139,"lords":8140,"readers":8141,"newcastle":8142,"faint":8143,"innovation":8144,"collect":8145,"situations":8146,"engagement":8147,"160":8148,"claude":8149,"mixture":8150,"##feld":8151,"peer":8152,"tissue":8153,"logo":8154,"lean":8155,"##ration":8156,"°f":8157,"floors":8158,"##ven":8159,"architects":8160,"reducing":8161,"##our":8162,"##ments":8163,"rope":8164,"1859":8165,"ottawa":8166,"##har":8167,"samples":8168,"banking":8169,"declaration":8170,"proteins":8171,"resignation":8172,"francois":8173,"saudi":8174,"advocate":8175,"exhibited":8176,"armor":8177,"twins":8178,"divorce":8179,"##ras":8180,"abraham":8181,"reviewed":8182,"jo":8183,"temporarily":8184,"matrix":8185,"physically":8186,"pulse":8187,"curled":8188,"##ena":8189,"difficulties":8190,"bengal":8191,"usage":8192,"##ban":8193,"annie":8194,"riders":8195,"certificate":8196,"##pi":8197,"holes":8198,"warsaw":8199,"distinctive":8200,"jessica":8201,"##mon":8202,"mutual":8203,"1857":8204,"customs":8205,"circular":8206,"eugene":8207,"removal":8208,"loaded":8209,"mere":8210,"vulnerable":8211,"depicted":8212,"generations":8213,"dame":821
4,"heir":8215,"enormous":8216,"lightly":8217,"climbing":8218,"pitched":8219,"lessons":8220,"pilots":8221,"nepal":8222,"ram":8223,"google":8224,"preparing":8225,"brad":8226,"louise":8227,"renowned":8228,"##₂":8229,"liam":8230,"##ably":8231,"plaza":8232,"shaw":8233,"sophie":8234,"brilliant":8235,"bills":8236,"##bar":8237,"##nik":8238,"fucking":8239,"mainland":8240,"server":8241,"pleasant":8242,"seized":8243,"veterans":8244,"jerked":8245,"fail":8246,"beta":8247,"brush":8248,"radiation":8249,"stored":8250,"warmth":8251,"southeastern":8252,"nate":8253,"sin":8254,"raced":8255,"berkeley":8256,"joke":8257,"athlete":8258,"designation":8259,"trunk":8260,"##low":8261,"roland":8262,"qualification":8263,"archives":8264,"heels":8265,"artwork":8266,"receives":8267,"judicial":8268,"reserves":8269,"##bed":8270,"woke":8271,"installation":8272,"abu":8273,"floating":8274,"fake":8275,"lesser":8276,"excitement":8277,"interface":8278,"concentrated":8279,"addressed":8280,"characteristic":8281,"amanda":8282,"saxophone":8283,"monk":8284,"auto":8285,"##bus":8286,"releasing":8287,"egg":8288,"dies":8289,"interaction":8290,"defender":8291,"ce":8292,"outbreak":8293,"glory":8294,"loving":8295,"##bert":8296,"sequel":8297,"consciousness":8298,"http":8299,"awake":8300,"ski":8301,"enrolled":8302,"##ress":8303,"handling":8304,"rookie":8305,"brow":8306,"somebody":8307,"biography":8308,"warfare":8309,"amounts":8310,"contracts":8311,"presentation":8312,"fabric":8313,"dissolved":8314,"challenged":8315,"meter":8316,"psychological":8317,"lt":8318,"elevated":8319,"rally":8320,"accurate":8321,"##tha":8322,"hospitals":8323,"undergraduate":8324,"specialist":8325,"venezuela":8326,"exhibit":8327,"shed":8328,"nursing":8329,"protestant":8330,"fluid":8331,"structural":8332,"footage":8333,"jared":8334,"consistent":8335,"prey":8336,"##ska":8337,"succession":8338,"reflect":8339,"exile":8340,"lebanon":8341,"wiped":8342,"suspect":8343,"shanghai":8344,"resting":8345,"integration":8346,"preservation":8347,"marvel":8348,"variant":8349,"pirates":8350,"sheep":8351,"rounded":8352,"capita":8353,"sailing":8354,"colonies":8355,"manuscript":8356,"deemed":8357,"variations":8358,"clarke":8359,"functional":8360,"emerging":8361,"boxing":8362,"relaxed":8363,"curse":8364,"azerbaijan":8365,"heavyweight":8366,"nickname":8367,"editorial":8368,"rang":8369,"grid":8370,"tightened":8371,"earthquake":8372,"flashed":8373,"miguel":8374,"rushing":8375,"##ches":8376,"improvements":8377,"boxes":8378,"brooks":8379,"180":8380,"consumption":8381,"molecular":8382,"felix":8383,"societies":8384,"repeatedly":8385,"variation":8386,"aids":8387,"civic":8388,"graphics":8389,"professionals":8390,"realm":8391,"autonomous":8392,"receiver":8393,"delayed":8394,"workshop":8395,"militia":8396,"chairs":8397,"trump":8398,"canyon":8399,"##point":8400,"harsh":8401,"extending":8402,"lovely":8403,"happiness":8404,"##jan":8405,"stake":8406,"eyebrows":8407,"embassy":8408,"wellington":8409,"hannah":8410,"##ella":8411,"sony":8412,"corners":8413,"bishops":8414,"swear":8415,"cloth":8416,"contents":8417,"xi":8418,"namely":8419,"commenced":8420,"1854":8421,"stanford":8422,"nashville":8423,"courage":8424,"graphic":8425,"commitment":8426,"garrison":8427,"##bin":8428,"hamlet":8429,"clearing":8430,"rebels":8431,"attraction":8432,"literacy":8433,"cooking":8434,"ruins":8435,"temples":8436,"jenny":8437,"humanity":8438,"celebrate":8439,"hasn":8440,"freight":8441,"sixty":8442,"rebel":8443,"bastard":8444,"##art":8445,"newton":8446,"##ada":8447,"deer":8448,"##ges":8449,"##ching":8450,"smiles":8451,"delaware":8452,"si
ngers":8453,"##ets":8454,"approaching":8455,"assists":8456,"flame":8457,"##ph":8458,"boulevard":8459,"barrel":8460,"planted":8461,"##ome":8462,"pursuit":8463,"##sia":8464,"consequences":8465,"posts":8466,"shallow":8467,"invitation":8468,"rode":8469,"depot":8470,"ernest":8471,"kane":8472,"rod":8473,"concepts":8474,"preston":8475,"topic":8476,"chambers":8477,"striking":8478,"blast":8479,"arrives":8480,"descendants":8481,"montgomery":8482,"ranges":8483,"worlds":8484,"##lay":8485,"##ari":8486,"span":8487,"chaos":8488,"praise":8489,"##ag":8490,"fewer":8491,"1855":8492,"sanctuary":8493,"mud":8494,"fbi":8495,"##ions":8496,"programmes":8497,"maintaining":8498,"unity":8499,"harper":8500,"bore":8501,"handsome":8502,"closure":8503,"tournaments":8504,"thunder":8505,"nebraska":8506,"linda":8507,"facade":8508,"puts":8509,"satisfied":8510,"argentine":8511,"dale":8512,"cork":8513,"dome":8514,"panama":8515,"##yl":8516,"1858":8517,"tasks":8518,"experts":8519,"##ates":8520,"feeding":8521,"equation":8522,"##las":8523,"##ida":8524,"##tu":8525,"engage":8526,"bryan":8527,"##ax":8528,"um":8529,"quartet":8530,"melody":8531,"disbanded":8532,"sheffield":8533,"blocked":8534,"gasped":8535,"delay":8536,"kisses":8537,"maggie":8538,"connects":8539,"##non":8540,"sts":8541,"poured":8542,"creator":8543,"publishers":8544,"##we":8545,"guided":8546,"ellis":8547,"extinct":8548,"hug":8549,"gaining":8550,"##ord":8551,"complicated":8552,"##bility":8553,"poll":8554,"clenched":8555,"investigate":8556,"##use":8557,"thereby":8558,"quantum":8559,"spine":8560,"cdp":8561,"humor":8562,"kills":8563,"administered":8564,"semifinals":8565,"##du":8566,"encountered":8567,"ignore":8568,"##bu":8569,"commentary":8570,"##maker":8571,"bother":8572,"roosevelt":8573,"140":8574,"plains":8575,"halfway":8576,"flowing":8577,"cultures":8578,"crack":8579,"imprisoned":8580,"neighboring":8581,"airline":8582,"##ses":8583,"##view":8584,"##mate":8585,"##ec":8586,"gather":8587,"wolves":8588,"marathon":8589,"transformed":8590,"##ill":8591,"cruise":8592,"organisations":8593,"carol":8594,"punch":8595,"exhibitions":8596,"numbered":8597,"alarm":8598,"ratings":8599,"daddy":8600,"silently":8601,"##stein":8602,"queens":8603,"colours":8604,"impression":8605,"guidance":8606,"liu":8607,"tactical":8608,"##rat":8609,"marshal":8610,"della":8611,"arrow":8612,"##ings":8613,"rested":8614,"feared":8615,"tender":8616,"owns":8617,"bitter":8618,"advisor":8619,"escort":8620,"##ides":8621,"spare":8622,"farms":8623,"grants":8624,"##ene":8625,"dragons":8626,"encourage":8627,"colleagues":8628,"cameras":8629,"##und":8630,"sucked":8631,"pile":8632,"spirits":8633,"prague":8634,"statements":8635,"suspension":8636,"landmark":8637,"fence":8638,"torture":8639,"recreation":8640,"bags":8641,"permanently":8642,"survivors":8643,"pond":8644,"spy":8645,"predecessor":8646,"bombing":8647,"coup":8648,"##og":8649,"protecting":8650,"transformation":8651,"glow":8652,"##lands":8653,"##book":8654,"dug":8655,"priests":8656,"andrea":8657,"feat":8658,"barn":8659,"jumping":8660,"##chen":8661,"##ologist":8662,"##con":8663,"casualties":8664,"stern":8665,"auckland":8666,"pipe":8667,"serie":8668,"revealing":8669,"ba":8670,"##bel":8671,"trevor":8672,"mercy":8673,"spectrum":8674,"yang":8675,"consist":8676,"governing":8677,"collaborated":8678,"possessed":8679,"epic":8680,"comprises":8681,"blew":8682,"shane":8683,"##ack":8684,"lopez":8685,"honored":8686,"magical":8687,"sacrifice":8688,"judgment":8689,"perceived":8690,"hammer":8691,"mtv":8692,"baronet":8693,"tune":8694,"das":8695,"missionary":8696,"sheets":8697,"350":8698,
"neutral":8699,"oral":8700,"threatening":8701,"attractive":8702,"shade":8703,"aims":8704,"seminary":8705,"##master":8706,"estates":8707,"1856":8708,"michel":8709,"wounds":8710,"refugees":8711,"manufacturers":8712,"##nic":8713,"mercury":8714,"syndrome":8715,"porter":8716,"##iya":8717,"##din":8718,"hamburg":8719,"identification":8720,"upstairs":8721,"purse":8722,"widened":8723,"pause":8724,"cared":8725,"breathed":8726,"affiliate":8727,"santiago":8728,"prevented":8729,"celtic":8730,"fisher":8731,"125":8732,"recruited":8733,"byzantine":8734,"reconstruction":8735,"farther":8736,"##mp":8737,"diet":8738,"sake":8739,"au":8740,"spite":8741,"sensation":8742,"##ert":8743,"blank":8744,"separation":8745,"105":8746,"##hon":8747,"vladimir":8748,"armies":8749,"anime":8750,"##lie":8751,"accommodate":8752,"orbit":8753,"cult":8754,"sofia":8755,"archive":8756,"##ify":8757,"##box":8758,"founders":8759,"sustained":8760,"disorder":8761,"honours":8762,"northeastern":8763,"mia":8764,"crops":8765,"violet":8766,"threats":8767,"blanket":8768,"fires":8769,"canton":8770,"followers":8771,"southwestern":8772,"prototype":8773,"voyage":8774,"assignment":8775,"altered":8776,"moderate":8777,"protocol":8778,"pistol":8779,"##eo":8780,"questioned":8781,"brass":8782,"lifting":8783,"1852":8784,"math":8785,"authored":8786,"##ual":8787,"doug":8788,"dimensional":8789,"dynamic":8790,"##san":8791,"1851":8792,"pronounced":8793,"grateful":8794,"quest":8795,"uncomfortable":8796,"boom":8797,"presidency":8798,"stevens":8799,"relating":8800,"politicians":8801,"chen":8802,"barrier":8803,"quinn":8804,"diana":8805,"mosque":8806,"tribal":8807,"cheese":8808,"palmer":8809,"portions":8810,"sometime":8811,"chester":8812,"treasure":8813,"wu":8814,"bend":8815,"download":8816,"millions":8817,"reforms":8818,"registration":8819,"##osa":8820,"consequently":8821,"monitoring":8822,"ate":8823,"preliminary":8824,"brandon":8825,"invented":8826,"ps":8827,"eaten":8828,"exterior":8829,"intervention":8830,"ports":8831,"documented":8832,"log":8833,"displays":8834,"lecture":8835,"sally":8836,"favourite":8837,"##itz":8838,"vermont":8839,"lo":8840,"invisible":8841,"isle":8842,"breed":8843,"##ator":8844,"journalists":8845,"relay":8846,"speaks":8847,"backward":8848,"explore":8849,"midfielder":8850,"actively":8851,"stefan":8852,"procedures":8853,"cannon":8854,"blond":8855,"kenneth":8856,"centered":8857,"servants":8858,"chains":8859,"libraries":8860,"malcolm":8861,"essex":8862,"henri":8863,"slavery":8864,"##hal":8865,"facts":8866,"fairy":8867,"coached":8868,"cassie":8869,"cats":8870,"washed":8871,"cop":8872,"##fi":8873,"announcement":8874,"item":8875,"2000s":8876,"vinyl":8877,"activated":8878,"marco":8879,"frontier":8880,"growled":8881,"curriculum":8882,"##das":8883,"loyal":8884,"accomplished":8885,"leslie":8886,"ritual":8887,"kenny":8888,"##00":8889,"vii":8890,"napoleon":8891,"hollow":8892,"hybrid":8893,"jungle":8894,"stationed":8895,"friedrich":8896,"counted":8897,"##ulated":8898,"platinum":8899,"theatrical":8900,"seated":8901,"col":8902,"rubber":8903,"glen":8904,"1840":8905,"diversity":8906,"healing":8907,"extends":8908,"id":8909,"provisions":8910,"administrator":8911,"columbus":8912,"##oe":8913,"tributary":8914,"te":8915,"assured":8916,"org":8917,"##uous":8918,"prestigious":8919,"examined":8920,"lectures":8921,"grammy":8922,"ronald":8923,"associations":8924,"bailey":8925,"allan":8926,"essays":8927,"flute":8928,"believing":8929,"consultant":8930,"proceedings":8931,"travelling":8932,"1853":8933,"kit":8934,"kerala":8935,"yugoslavia":8936,"buddy":8937,"methodist":8938,"##
ith":8939,"burial":8940,"centres":8941,"batman":8942,"##nda":8943,"discontinued":8944,"bo":8945,"dock":8946,"stockholm":8947,"lungs":8948,"severely":8949,"##nk":8950,"citing":8951,"manga":8952,"##ugh":8953,"steal":8954,"mumbai":8955,"iraqi":8956,"robot":8957,"celebrity":8958,"bride":8959,"broadcasts":8960,"abolished":8961,"pot":8962,"joel":8963,"overhead":8964,"franz":8965,"packed":8966,"reconnaissance":8967,"johann":8968,"acknowledged":8969,"introduce":8970,"handled":8971,"doctorate":8972,"developments":8973,"drinks":8974,"alley":8975,"palestine":8976,"##nis":8977,"##aki":8978,"proceeded":8979,"recover":8980,"bradley":8981,"grain":8982,"patch":8983,"afford":8984,"infection":8985,"nationalist":8986,"legendary":8987,"##ath":8988,"interchange":8989,"virtually":8990,"gen":8991,"gravity":8992,"exploration":8993,"amber":8994,"vital":8995,"wishes":8996,"powell":8997,"doctrine":8998,"elbow":8999,"screenplay":9000,"##bird":9001,"contribute":9002,"indonesian":9003,"pet":9004,"creates":9005,"##com":9006,"enzyme":9007,"kylie":9008,"discipline":9009,"drops":9010,"manila":9011,"hunger":9012,"##ien":9013,"layers":9014,"suffer":9015,"fever":9016,"bits":9017,"monica":9018,"keyboard":9019,"manages":9020,"##hood":9021,"searched":9022,"appeals":9023,"##bad":9024,"testament":9025,"grande":9026,"reid":9027,"##war":9028,"beliefs":9029,"congo":9030,"##ification":9031,"##dia":9032,"si":9033,"requiring":9034,"##via":9035,"casey":9036,"1849":9037,"regret":9038,"streak":9039,"rape":9040,"depends":9041,"syrian":9042,"sprint":9043,"pound":9044,"tourists":9045,"upcoming":9046,"pub":9047,"##xi":9048,"tense":9049,"##els":9050,"practiced":9051,"echo":9052,"nationwide":9053,"guild":9054,"motorcycle":9055,"liz":9056,"##zar":9057,"chiefs":9058,"desired":9059,"elena":9060,"bye":9061,"precious":9062,"absorbed":9063,"relatives":9064,"booth":9065,"pianist":9066,"##mal":9067,"citizenship":9068,"exhausted":9069,"wilhelm":9070,"##ceae":9071,"##hed":9072,"noting":9073,"quarterback":9074,"urge":9075,"hectares":9076,"##gue":9077,"ace":9078,"holly":9079,"##tal":9080,"blonde":9081,"davies":9082,"parked":9083,"sustainable":9084,"stepping":9085,"twentieth":9086,"airfield":9087,"galaxy":9088,"nest":9089,"chip":9090,"##nell":9091,"tan":9092,"shaft":9093,"paulo":9094,"requirement":9095,"##zy":9096,"paradise":9097,"tobacco":9098,"trans":9099,"renewed":9100,"vietnamese":9101,"##cker":9102,"##ju":9103,"suggesting":9104,"catching":9105,"holmes":9106,"enjoying":9107,"md":9108,"trips":9109,"colt":9110,"holder":9111,"butterfly":9112,"nerve":9113,"reformed":9114,"cherry":9115,"bowling":9116,"trailer":9117,"carriage":9118,"goodbye":9119,"appreciate":9120,"toy":9121,"joshua":9122,"interactive":9123,"enabled":9124,"involve":9125,"##kan":9126,"collar":9127,"determination":9128,"bunch":9129,"facebook":9130,"recall":9131,"shorts":9132,"superintendent":9133,"episcopal":9134,"frustration":9135,"giovanni":9136,"nineteenth":9137,"laser":9138,"privately":9139,"array":9140,"circulation":9141,"##ovic":9142,"armstrong":9143,"deals":9144,"painful":9145,"permit":9146,"discrimination":9147,"##wi":9148,"aires":9149,"retiring":9150,"cottage":9151,"ni":9152,"##sta":9153,"horizon":9154,"ellen":9155,"jamaica":9156,"ripped":9157,"fernando":9158,"chapters":9159,"playstation":9160,"patron":9161,"lecturer":9162,"navigation":9163,"behaviour":9164,"genes":9165,"georgian":9166,"export":9167,"solomon":9168,"rivals":9169,"swift":9170,"seventeen":9171,"rodriguez":9172,"princeton":9173,"independently":9174,"sox":9175,"1847":9176,"arguing":9177,"entity":9178,"casting":9179,"hank":918
0,"criteria":9181,"oakland":9182,"geographic":9183,"milwaukee":9184,"reflection":9185,"expanding":9186,"conquest":9187,"dubbed":9188,"##tv":9189,"halt":9190,"brave":9191,"brunswick":9192,"doi":9193,"arched":9194,"curtis":9195,"divorced":9196,"predominantly":9197,"somerset":9198,"streams":9199,"ugly":9200,"zoo":9201,"horrible":9202,"curved":9203,"buenos":9204,"fierce":9205,"dictionary":9206,"vector":9207,"theological":9208,"unions":9209,"handful":9210,"stability":9211,"chan":9212,"punjab":9213,"segments":9214,"##lly":9215,"altar":9216,"ignoring":9217,"gesture":9218,"monsters":9219,"pastor":9220,"##stone":9221,"thighs":9222,"unexpected":9223,"operators":9224,"abruptly":9225,"coin":9226,"compiled":9227,"associates":9228,"improving":9229,"migration":9230,"pin":9231,"##ose":9232,"compact":9233,"collegiate":9234,"reserved":9235,"##urs":9236,"quarterfinals":9237,"roster":9238,"restore":9239,"assembled":9240,"hurry":9241,"oval":9242,"##cies":9243,"1846":9244,"flags":9245,"martha":9246,"##del":9247,"victories":9248,"sharply":9249,"##rated":9250,"argues":9251,"deadly":9252,"neo":9253,"drawings":9254,"symbols":9255,"performer":9256,"##iel":9257,"griffin":9258,"restrictions":9259,"editing":9260,"andrews":9261,"java":9262,"journals":9263,"arabia":9264,"compositions":9265,"dee":9266,"pierce":9267,"removing":9268,"hindi":9269,"casino":9270,"runway":9271,"civilians":9272,"minds":9273,"nasa":9274,"hotels":9275,"##zation":9276,"refuge":9277,"rent":9278,"retain":9279,"potentially":9280,"conferences":9281,"suburban":9282,"conducting":9283,"##tto":9284,"##tions":9285,"##tle":9286,"descended":9287,"massacre":9288,"##cal":9289,"ammunition":9290,"terrain":9291,"fork":9292,"souls":9293,"counts":9294,"chelsea":9295,"durham":9296,"drives":9297,"cab":9298,"##bank":9299,"perth":9300,"realizing":9301,"palestinian":9302,"finn":9303,"simpson":9304,"##dal":9305,"betty":9306,"##ule":9307,"moreover":9308,"particles":9309,"cardinals":9310,"tent":9311,"evaluation":9312,"extraordinary":9313,"##oid":9314,"inscription":9315,"##works":9316,"wednesday":9317,"chloe":9318,"maintains":9319,"panels":9320,"ashley":9321,"trucks":9322,"##nation":9323,"cluster":9324,"sunlight":9325,"strikes":9326,"zhang":9327,"##wing":9328,"dialect":9329,"canon":9330,"##ap":9331,"tucked":9332,"##ws":9333,"collecting":9334,"##mas":9335,"##can":9336,"##sville":9337,"maker":9338,"quoted":9339,"evan":9340,"franco":9341,"aria":9342,"buying":9343,"cleaning":9344,"eva":9345,"closet":9346,"provision":9347,"apollo":9348,"clinic":9349,"rat":9350,"##ez":9351,"necessarily":9352,"ac":9353,"##gle":9354,"##ising":9355,"venues":9356,"flipped":9357,"cent":9358,"spreading":9359,"trustees":9360,"checking":9361,"authorized":9362,"##sco":9363,"disappointed":9364,"##ado":9365,"notion":9366,"duration":9367,"trumpet":9368,"hesitated":9369,"topped":9370,"brussels":9371,"rolls":9372,"theoretical":9373,"hint":9374,"define":9375,"aggressive":9376,"repeat":9377,"wash":9378,"peaceful":9379,"optical":9380,"width":9381,"allegedly":9382,"mcdonald":9383,"strict":9384,"copyright":9385,"##illa":9386,"investors":9387,"mar":9388,"jam":9389,"witnesses":9390,"sounding":9391,"miranda":9392,"michelle":9393,"privacy":9394,"hugo":9395,"harmony":9396,"##pp":9397,"valid":9398,"lynn":9399,"glared":9400,"nina":9401,"102":9402,"headquartered":9403,"diving":9404,"boarding":9405,"gibson":9406,"##ncy":9407,"albanian":9408,"marsh":9409,"routine":9410,"dealt":9411,"enhanced":9412,"er":9413,"intelligent":9414,"substance":9415,"targeted":9416,"enlisted":9417,"discovers":9418,"spinning":9419,"observations":9420,
"pissed":9421,"smoking":9422,"rebecca":9423,"capitol":9424,"visa":9425,"varied":9426,"costume":9427,"seemingly":9428,"indies":9429,"compensation":9430,"surgeon":9431,"thursday":9432,"arsenal":9433,"westminster":9434,"suburbs":9435,"rid":9436,"anglican":9437,"##ridge":9438,"knots":9439,"foods":9440,"alumni":9441,"lighter":9442,"fraser":9443,"whoever":9444,"portal":9445,"scandal":9446,"##ray":9447,"gavin":9448,"advised":9449,"instructor":9450,"flooding":9451,"terrorist":9452,"##ale":9453,"teenage":9454,"interim":9455,"senses":9456,"duck":9457,"teen":9458,"thesis":9459,"abby":9460,"eager":9461,"overcome":9462,"##ile":9463,"newport":9464,"glenn":9465,"rises":9466,"shame":9467,"##cc":9468,"prompted":9469,"priority":9470,"forgot":9471,"bomber":9472,"nicolas":9473,"protective":9474,"360":9475,"cartoon":9476,"katherine":9477,"breeze":9478,"lonely":9479,"trusted":9480,"henderson":9481,"richardson":9482,"relax":9483,"banner":9484,"candy":9485,"palms":9486,"remarkable":9487,"##rio":9488,"legends":9489,"cricketer":9490,"essay":9491,"ordained":9492,"edmund":9493,"rifles":9494,"trigger":9495,"##uri":9496,"##away":9497,"sail":9498,"alert":9499,"1830":9500,"audiences":9501,"penn":9502,"sussex":9503,"siblings":9504,"pursued":9505,"indianapolis":9506,"resist":9507,"rosa":9508,"consequence":9509,"succeed":9510,"avoided":9511,"1845":9512,"##ulation":9513,"inland":9514,"##tie":9515,"##nna":9516,"counsel":9517,"profession":9518,"chronicle":9519,"hurried":9520,"##una":9521,"eyebrow":9522,"eventual":9523,"bleeding":9524,"innovative":9525,"cure":9526,"##dom":9527,"committees":9528,"accounting":9529,"con":9530,"scope":9531,"hardy":9532,"heather":9533,"tenor":9534,"gut":9535,"herald":9536,"codes":9537,"tore":9538,"scales":9539,"wagon":9540,"##oo":9541,"luxury":9542,"tin":9543,"prefer":9544,"fountain":9545,"triangle":9546,"bonds":9547,"darling":9548,"convoy":9549,"dried":9550,"traced":9551,"beings":9552,"troy":9553,"accidentally":9554,"slam":9555,"findings":9556,"smelled":9557,"joey":9558,"lawyers":9559,"outcome":9560,"steep":9561,"bosnia":9562,"configuration":9563,"shifting":9564,"toll":9565,"brook":9566,"performers":9567,"lobby":9568,"philosophical":9569,"construct":9570,"shrine":9571,"aggregate":9572,"boot":9573,"cox":9574,"phenomenon":9575,"savage":9576,"insane":9577,"solely":9578,"reynolds":9579,"lifestyle":9580,"##ima":9581,"nationally":9582,"holdings":9583,"consideration":9584,"enable":9585,"edgar":9586,"mo":9587,"mama":9588,"##tein":9589,"fights":9590,"relegation":9591,"chances":9592,"atomic":9593,"hub":9594,"conjunction":9595,"awkward":9596,"reactions":9597,"currency":9598,"finale":9599,"kumar":9600,"underwent":9601,"steering":9602,"elaborate":9603,"gifts":9604,"comprising":9605,"melissa":9606,"veins":9607,"reasonable":9608,"sunshine":9609,"chi":9610,"solve":9611,"trails":9612,"inhabited":9613,"elimination":9614,"ethics":9615,"huh":9616,"ana":9617,"molly":9618,"consent":9619,"apartments":9620,"layout":9621,"marines":9622,"##ces":9623,"hunters":9624,"bulk":9625,"##oma":9626,"hometown":9627,"##wall":9628,"##mont":9629,"cracked":9630,"reads":9631,"neighbouring":9632,"withdrawn":9633,"admission":9634,"wingspan":9635,"damned":9636,"anthology":9637,"lancashire":9638,"brands":9639,"batting":9640,"forgive":9641,"cuban":9642,"awful":9643,"##lyn":9644,"104":9645,"dimensions":9646,"imagination":9647,"##ade":9648,"dante":9649,"##ship":9650,"tracking":9651,"desperately":9652,"goalkeeper":9653,"##yne":9654,"groaned":9655,"workshops":9656,"confident":9657,"burton":9658,"gerald":9659,"milton":9660,"circus":9661,"uncertain":9
662,"slope":9663,"copenhagen":9664,"sophia":9665,"fog":9666,"philosopher":9667,"portraits":9668,"accent":9669,"cycling":9670,"varying":9671,"gripped":9672,"larvae":9673,"garrett":9674,"specified":9675,"scotia":9676,"mature":9677,"luther":9678,"kurt":9679,"rap":9680,"##kes":9681,"aerial":9682,"750":9683,"ferdinand":9684,"heated":9685,"es":9686,"transported":9687,"##shan":9688,"safely":9689,"nonetheless":9690,"##orn":9691,"##gal":9692,"motors":9693,"demanding":9694,"##sburg":9695,"startled":9696,"##brook":9697,"ally":9698,"generate":9699,"caps":9700,"ghana":9701,"stained":9702,"demo":9703,"mentions":9704,"beds":9705,"ap":9706,"afterward":9707,"diary":9708,"##bling":9709,"utility":9710,"##iro":9711,"richards":9712,"1837":9713,"conspiracy":9714,"conscious":9715,"shining":9716,"footsteps":9717,"observer":9718,"cyprus":9719,"urged":9720,"loyalty":9721,"developer":9722,"probability":9723,"olive":9724,"upgraded":9725,"gym":9726,"miracle":9727,"insects":9728,"graves":9729,"1844":9730,"ourselves":9731,"hydrogen":9732,"amazon":9733,"katie":9734,"tickets":9735,"poets":9736,"##pm":9737,"planes":9738,"##pan":9739,"prevention":9740,"witnessed":9741,"dense":9742,"jin":9743,"randy":9744,"tang":9745,"warehouse":9746,"monroe":9747,"bang":9748,"archived":9749,"elderly":9750,"investigations":9751,"alec":9752,"granite":9753,"mineral":9754,"conflicts":9755,"controlling":9756,"aboriginal":9757,"carlo":9758,"##zu":9759,"mechanics":9760,"stan":9761,"stark":9762,"rhode":9763,"skirt":9764,"est":9765,"##berry":9766,"bombs":9767,"respected":9768,"##horn":9769,"imposed":9770,"limestone":9771,"deny":9772,"nominee":9773,"memphis":9774,"grabbing":9775,"disabled":9776,"##als":9777,"amusement":9778,"aa":9779,"frankfurt":9780,"corn":9781,"referendum":9782,"varies":9783,"slowed":9784,"disk":9785,"firms":9786,"unconscious":9787,"incredible":9788,"clue":9789,"sue":9790,"##zhou":9791,"twist":9792,"##cio":9793,"joins":9794,"idaho":9795,"chad":9796,"developers":9797,"computing":9798,"destroyer":9799,"103":9800,"mortal":9801,"tucker":9802,"kingston":9803,"choices":9804,"yu":9805,"carson":9806,"1800":9807,"os":9808,"whitney":9809,"geneva":9810,"pretend":9811,"dimension":9812,"staged":9813,"plateau":9814,"maya":9815,"##une":9816,"freestyle":9817,"##bc":9818,"rovers":9819,"hiv":9820,"##ids":9821,"tristan":9822,"classroom":9823,"prospect":9824,"##hus":9825,"honestly":9826,"diploma":9827,"lied":9828,"thermal":9829,"auxiliary":9830,"feast":9831,"unlikely":9832,"iata":9833,"##tel":9834,"morocco":9835,"pounding":9836,"treasury":9837,"lithuania":9838,"considerably":9839,"1841":9840,"dish":9841,"1812":9842,"geological":9843,"matching":9844,"stumbled":9845,"destroying":9846,"marched":9847,"brien":9848,"advances":9849,"cake":9850,"nicole":9851,"belle":9852,"settling":9853,"measuring":9854,"directing":9855,"##mie":9856,"tuesday":9857,"bassist":9858,"capabilities":9859,"stunned":9860,"fraud":9861,"torpedo":9862,"##list":9863,"##phone":9864,"anton":9865,"wisdom":9866,"surveillance":9867,"ruined":9868,"##ulate":9869,"lawsuit":9870,"healthcare":9871,"theorem":9872,"halls":9873,"trend":9874,"aka":9875,"horizontal":9876,"dozens":9877,"acquire":9878,"lasting":9879,"swim":9880,"hawk":9881,"gorgeous":9882,"fees":9883,"vicinity":9884,"decrease":9885,"adoption":9886,"tactics":9887,"##ography":9888,"pakistani":9889,"##ole":9890,"draws":9891,"##hall":9892,"willie":9893,"burke":9894,"heath":9895,"algorithm":9896,"integral":9897,"powder":9898,"elliott":9899,"brigadier":9900,"jackie":9901,"tate":9902,"varieties":9903,"darker":9904,"##cho":9905,"lately":9906,"cig
arette":9907,"specimens":9908,"adds":9909,"##ree":9910,"##ensis":9911,"##inger":9912,"exploded":9913,"finalist":9914,"cia":9915,"murders":9916,"wilderness":9917,"arguments":9918,"nicknamed":9919,"acceptance":9920,"onwards":9921,"manufacture":9922,"robertson":9923,"jets":9924,"tampa":9925,"enterprises":9926,"blog":9927,"loudly":9928,"composers":9929,"nominations":9930,"1838":9931,"ai":9932,"malta":9933,"inquiry":9934,"automobile":9935,"hosting":9936,"viii":9937,"rays":9938,"tilted":9939,"grief":9940,"museums":9941,"strategies":9942,"furious":9943,"euro":9944,"equality":9945,"cohen":9946,"poison":9947,"surrey":9948,"wireless":9949,"governed":9950,"ridiculous":9951,"moses":9952,"##esh":9953,"##room":9954,"vanished":9955,"##ito":9956,"barnes":9957,"attract":9958,"morrison":9959,"istanbul":9960,"##iness":9961,"absent":9962,"rotation":9963,"petition":9964,"janet":9965,"##logical":9966,"satisfaction":9967,"custody":9968,"deliberately":9969,"observatory":9970,"comedian":9971,"surfaces":9972,"pinyin":9973,"novelist":9974,"strictly":9975,"canterbury":9976,"oslo":9977,"monks":9978,"embrace":9979,"ibm":9980,"jealous":9981,"photograph":9982,"continent":9983,"dorothy":9984,"marina":9985,"doc":9986,"excess":9987,"holden":9988,"allegations":9989,"explaining":9990,"stack":9991,"avoiding":9992,"lance":9993,"storyline":9994,"majesty":9995,"poorly":9996,"spike":9997,"dos":9998,"bradford":9999,"raven":10000,"travis":10001,"classics":10002,"proven":10003,"voltage":10004,"pillow":10005,"fists":10006,"butt":10007,"1842":10008,"interpreted":10009,"##car":10010,"1839":10011,"gage":10012,"telegraph":10013,"lens":10014,"promising":10015,"expelled":10016,"casual":10017,"collector":10018,"zones":10019,"##min":10020,"silly":10021,"nintendo":10022,"##kh":10023,"##bra":10024,"downstairs":10025,"chef":10026,"suspicious":10027,"afl":10028,"flies":10029,"vacant":10030,"uganda":10031,"pregnancy":10032,"condemned":10033,"lutheran":10034,"estimates":10035,"cheap":10036,"decree":10037,"saxon":10038,"proximity":10039,"stripped":10040,"idiot":10041,"deposits":10042,"contrary":10043,"presenter":10044,"magnus":10045,"glacier":10046,"im":10047,"offense":10048,"edwin":10049,"##ori":10050,"upright":10051,"##long":10052,"bolt":10053,"##ois":10054,"toss":10055,"geographical":10056,"##izes":10057,"environments":10058,"delicate":10059,"marking":10060,"abstract":10061,"xavier":10062,"nails":10063,"windsor":10064,"plantation":10065,"occurring":10066,"equity":10067,"saskatchewan":10068,"fears":10069,"drifted":10070,"sequences":10071,"vegetation":10072,"revolt":10073,"##stic":10074,"1843":10075,"sooner":10076,"fusion":10077,"opposing":10078,"nato":10079,"skating":10080,"1836":10081,"secretly":10082,"ruin":10083,"lease":10084,"##oc":10085,"edit":10086,"##nne":10087,"flora":10088,"anxiety":10089,"ruby":10090,"##ological":10091,"##mia":10092,"tel":10093,"bout":10094,"taxi":10095,"emmy":10096,"frost":10097,"rainbow":10098,"compounds":10099,"foundations":10100,"rainfall":10101,"assassination":10102,"nightmare":10103,"dominican":10104,"##win":10105,"achievements":10106,"deserve":10107,"orlando":10108,"intact":10109,"armenia":10110,"##nte":10111,"calgary":10112,"valentine":10113,"106":10114,"marion":10115,"proclaimed":10116,"theodore":10117,"bells":10118,"courtyard":10119,"thigh":10120,"gonzalez":10121,"console":10122,"troop":10123,"minimal":10124,"monte":10125,"everyday":10126,"##ence":10127,"##if":10128,"supporter":10129,"terrorism":10130,"buck":10131,"openly":10132,"presbyterian":10133,"activists":10134,"carpet":10135,"##iers":10136,"rubbing":10137
,"uprising":10138,"##yi":10139,"cute":10140,"conceived":10141,"legally":10142,"##cht":10143,"millennium":10144,"cello":10145,"velocity":10146,"ji":10147,"rescued":10148,"cardiff":10149,"1835":10150,"rex":10151,"concentrate":10152,"senators":10153,"beard":10154,"rendered":10155,"glowing":10156,"battalions":10157,"scouts":10158,"competitors":10159,"sculptor":10160,"catalogue":10161,"arctic":10162,"ion":10163,"raja":10164,"bicycle":10165,"wow":10166,"glancing":10167,"lawn":10168,"##woman":10169,"gentleman":10170,"lighthouse":10171,"publish":10172,"predicted":10173,"calculated":10174,"##val":10175,"variants":10176,"##gne":10177,"strain":10178,"##ui":10179,"winston":10180,"deceased":10181,"##nus":10182,"touchdowns":10183,"brady":10184,"caleb":10185,"sinking":10186,"echoed":10187,"crush":10188,"hon":10189,"blessed":10190,"protagonist":10191,"hayes":10192,"endangered":10193,"magnitude":10194,"editors":10195,"##tine":10196,"estimate":10197,"responsibilities":10198,"##mel":10199,"backup":10200,"laying":10201,"consumed":10202,"sealed":10203,"zurich":10204,"lovers":10205,"frustrated":10206,"##eau":10207,"ahmed":10208,"kicking":10209,"mit":10210,"treasurer":10211,"1832":10212,"biblical":10213,"refuse":10214,"terrified":10215,"pump":10216,"agrees":10217,"genuine":10218,"imprisonment":10219,"refuses":10220,"plymouth":10221,"##hen":10222,"lou":10223,"##nen":10224,"tara":10225,"trembling":10226,"antarctic":10227,"ton":10228,"learns":10229,"##tas":10230,"crap":10231,"crucial":10232,"faction":10233,"atop":10234,"##borough":10235,"wrap":10236,"lancaster":10237,"odds":10238,"hopkins":10239,"erik":10240,"lyon":10241,"##eon":10242,"bros":10243,"##ode":10244,"snap":10245,"locality":10246,"tips":10247,"empress":10248,"crowned":10249,"cal":10250,"acclaimed":10251,"chuckled":10252,"##ory":10253,"clara":10254,"sends":10255,"mild":10256,"towel":10257,"##fl":10258,"##day":10259,"##а":10260,"wishing":10261,"assuming":10262,"interviewed":10263,"##bal":10264,"##die":10265,"interactions":10266,"eden":10267,"cups":10268,"helena":10269,"##lf":10270,"indie":10271,"beck":10272,"##fire":10273,"batteries":10274,"filipino":10275,"wizard":10276,"parted":10277,"##lam":10278,"traces":10279,"##born":10280,"rows":10281,"idol":10282,"albany":10283,"delegates":10284,"##ees":10285,"##sar":10286,"discussions":10287,"##ex":10288,"notre":10289,"instructed":10290,"belgrade":10291,"highways":10292,"suggestion":10293,"lauren":10294,"possess":10295,"orientation":10296,"alexandria":10297,"abdul":10298,"beats":10299,"salary":10300,"reunion":10301,"ludwig":10302,"alright":10303,"wagner":10304,"intimate":10305,"pockets":10306,"slovenia":10307,"hugged":10308,"brighton":10309,"merchants":10310,"cruel":10311,"stole":10312,"trek":10313,"slopes":10314,"repairs":10315,"enrollment":10316,"politically":10317,"underlying":10318,"promotional":10319,"counting":10320,"boeing":10321,"##bb":10322,"isabella":10323,"naming":10324,"##и":10325,"keen":10326,"bacteria":10327,"listing":10328,"separately":10329,"belfast":10330,"ussr":10331,"450":10332,"lithuanian":10333,"anybody":10334,"ribs":10335,"sphere":10336,"martinez":10337,"cock":10338,"embarrassed":10339,"proposals":10340,"fragments":10341,"nationals":10342,"##fs":10343,"##wski":10344,"premises":10345,"fin":10346,"1500":10347,"alpine":10348,"matched":10349,"freely":10350,"bounded":10351,"jace":10352,"sleeve":10353,"##af":10354,"gaming":10355,"pier":10356,"populated":10357,"evident":10358,"##like":10359,"frances":10360,"flooded":10361,"##dle":10362,"frightened":10363,"pour":10364,"trainer":10365,"framed":10366,"v
isitor":10367,"challenging":10368,"pig":10369,"wickets":10370,"##fold":10371,"infected":10372,"email":10373,"##pes":10374,"arose":10375,"##aw":10376,"reward":10377,"ecuador":10378,"oblast":10379,"vale":10380,"ch":10381,"shuttle":10382,"##usa":10383,"bach":10384,"rankings":10385,"forbidden":10386,"cornwall":10387,"accordance":10388,"salem":10389,"consumers":10390,"bruno":10391,"fantastic":10392,"toes":10393,"machinery":10394,"resolved":10395,"julius":10396,"remembering":10397,"propaganda":10398,"iceland":10399,"bombardment":10400,"tide":10401,"contacts":10402,"wives":10403,"##rah":10404,"concerto":10405,"macdonald":10406,"albania":10407,"implement":10408,"daisy":10409,"tapped":10410,"sudan":10411,"helmet":10412,"angela":10413,"mistress":10414,"##lic":10415,"crop":10416,"sunk":10417,"finest":10418,"##craft":10419,"hostile":10420,"##ute":10421,"##tsu":10422,"boxer":10423,"fr":10424,"paths":10425,"adjusted":10426,"habit":10427,"ballot":10428,"supervision":10429,"soprano":10430,"##zen":10431,"bullets":10432,"wicked":10433,"sunset":10434,"regiments":10435,"disappear":10436,"lamp":10437,"performs":10438,"app":10439,"##gia":10440,"##oa":10441,"rabbit":10442,"digging":10443,"incidents":10444,"entries":10445,"##cion":10446,"dishes":10447,"##oi":10448,"introducing":10449,"##ati":10450,"##fied":10451,"freshman":10452,"slot":10453,"jill":10454,"tackles":10455,"baroque":10456,"backs":10457,"##iest":10458,"lone":10459,"sponsor":10460,"destiny":10461,"altogether":10462,"convert":10463,"##aro":10464,"consensus":10465,"shapes":10466,"demonstration":10467,"basically":10468,"feminist":10469,"auction":10470,"artifacts":10471,"##bing":10472,"strongest":10473,"twitter":10474,"halifax":10475,"2019":10476,"allmusic":10477,"mighty":10478,"smallest":10479,"precise":10480,"alexandra":10481,"viola":10482,"##los":10483,"##ille":10484,"manuscripts":10485,"##illo":10486,"dancers":10487,"ari":10488,"managers":10489,"monuments":10490,"blades":10491,"barracks":10492,"springfield":10493,"maiden":10494,"consolidated":10495,"electron":10496,"##end":10497,"berry":10498,"airing":10499,"wheat":10500,"nobel":10501,"inclusion":10502,"blair":10503,"payments":10504,"geography":10505,"bee":10506,"cc":10507,"eleanor":10508,"react":10509,"##hurst":10510,"afc":10511,"manitoba":10512,"##yu":10513,"su":10514,"lineup":10515,"fitness":10516,"recreational":10517,"investments":10518,"airborne":10519,"disappointment":10520,"##dis":10521,"edmonton":10522,"viewing":10523,"##row":10524,"renovation":10525,"##cast":10526,"infant":10527,"bankruptcy":10528,"roses":10529,"aftermath":10530,"pavilion":10531,"##yer":10532,"carpenter":10533,"withdrawal":10534,"ladder":10535,"##hy":10536,"discussing":10537,"popped":10538,"reliable":10539,"agreements":10540,"rochester":10541,"##abad":10542,"curves":10543,"bombers":10544,"220":10545,"rao":10546,"reverend":10547,"decreased":10548,"choosing":10549,"107":10550,"stiff":10551,"consulting":10552,"naples":10553,"crawford":10554,"tracy":10555,"ka":10556,"ribbon":10557,"cops":10558,"##lee":10559,"crushed":10560,"deciding":10561,"unified":10562,"teenager":10563,"accepting":10564,"flagship":10565,"explorer":10566,"poles":10567,"sanchez":10568,"inspection":10569,"revived":10570,"skilled":10571,"induced":10572,"exchanged":10573,"flee":10574,"locals":10575,"tragedy":10576,"swallow":10577,"loading":10578,"hanna":10579,"demonstrate":10580,"##ela":10581,"salvador":10582,"flown":10583,"contestants":10584,"civilization":10585,"##ines":10586,"wanna":10587,"rhodes":10588,"fletcher":10589,"hector":10590,"knocking":10591,"considers"
:10592,"##ough":10593,"nash":10594,"mechanisms":10595,"sensed":10596,"mentally":10597,"walt":10598,"unclear":10599,"##eus":10600,"renovated":10601,"madame":10602,"##cks":10603,"crews":10604,"governmental":10605,"##hin":10606,"undertaken":10607,"monkey":10608,"##ben":10609,"##ato":10610,"fatal":10611,"armored":10612,"copa":10613,"caves":10614,"governance":10615,"grasp":10616,"perception":10617,"certification":10618,"froze":10619,"damp":10620,"tugged":10621,"wyoming":10622,"##rg":10623,"##ero":10624,"newman":10625,"##lor":10626,"nerves":10627,"curiosity":10628,"graph":10629,"115":10630,"##ami":10631,"withdraw":10632,"tunnels":10633,"dull":10634,"meredith":10635,"moss":10636,"exhibits":10637,"neighbors":10638,"communicate":10639,"accuracy":10640,"explored":10641,"raiders":10642,"republicans":10643,"secular":10644,"kat":10645,"superman":10646,"penny":10647,"criticised":10648,"##tch":10649,"freed":10650,"update":10651,"conviction":10652,"wade":10653,"ham":10654,"likewise":10655,"delegation":10656,"gotta":10657,"doll":10658,"promises":10659,"technological":10660,"myth":10661,"nationality":10662,"resolve":10663,"convent":10664,"##mark":10665,"sharon":10666,"dig":10667,"sip":10668,"coordinator":10669,"entrepreneur":10670,"fold":10671,"##dine":10672,"capability":10673,"councillor":10674,"synonym":10675,"blown":10676,"swan":10677,"cursed":10678,"1815":10679,"jonas":10680,"haired":10681,"sofa":10682,"canvas":10683,"keeper":10684,"rivalry":10685,"##hart":10686,"rapper":10687,"speedway":10688,"swords":10689,"postal":10690,"maxwell":10691,"estonia":10692,"potter":10693,"recurring":10694,"##nn":10695,"##ave":10696,"errors":10697,"##oni":10698,"cognitive":10699,"1834":10700,"##²":10701,"claws":10702,"nadu":10703,"roberto":10704,"bce":10705,"wrestler":10706,"ellie":10707,"##ations":10708,"infinite":10709,"ink":10710,"##tia":10711,"presumably":10712,"finite":10713,"staircase":10714,"108":10715,"noel":10716,"patricia":10717,"nacional":10718,"##cation":10719,"chill":10720,"eternal":10721,"tu":10722,"preventing":10723,"prussia":10724,"fossil":10725,"limbs":10726,"##logist":10727,"ernst":10728,"frog":10729,"perez":10730,"rene":10731,"##ace":10732,"pizza":10733,"prussian":10734,"##ios":10735,"##vy":10736,"molecules":10737,"regulatory":10738,"answering":10739,"opinions":10740,"sworn":10741,"lengths":10742,"supposedly":10743,"hypothesis":10744,"upward":10745,"habitats":10746,"seating":10747,"ancestors":10748,"drank":10749,"yield":10750,"hd":10751,"synthesis":10752,"researcher":10753,"modest":10754,"##var":10755,"mothers":10756,"peered":10757,"voluntary":10758,"homeland":10759,"##the":10760,"acclaim":10761,"##igan":10762,"static":10763,"valve":10764,"luxembourg":10765,"alto":10766,"carroll":10767,"fe":10768,"receptor":10769,"norton":10770,"ambulance":10771,"##tian":10772,"johnston":10773,"catholics":10774,"depicting":10775,"jointly":10776,"elephant":10777,"gloria":10778,"mentor":10779,"badge":10780,"ahmad":10781,"distinguish":10782,"remarked":10783,"councils":10784,"precisely":10785,"allison":10786,"advancing":10787,"detection":10788,"crowded":10789,"##10":10790,"cooperative":10791,"ankle":10792,"mercedes":10793,"dagger":10794,"surrendered":10795,"pollution":10796,"commit":10797,"subway":10798,"jeffrey":10799,"lesson":10800,"sculptures":10801,"provider":10802,"##fication":10803,"membrane":10804,"timothy":10805,"rectangular":10806,"fiscal":10807,"heating":10808,"teammate":10809,"basket":10810,"particle":10811,"anonymous":10812,"deployment":10813,"##ple":10814,"missiles":10815,"courthouse":10816,"proportion":10817,"sh
oe":10818,"sec":10819,"##ller":10820,"complaints":10821,"forbes":10822,"blacks":10823,"abandon":10824,"remind":10825,"sizes":10826,"overwhelming":10827,"autobiography":10828,"natalie":10829,"##awa":10830,"risks":10831,"contestant":10832,"countryside":10833,"babies":10834,"scorer":10835,"invaded":10836,"enclosed":10837,"proceed":10838,"hurling":10839,"disorders":10840,"##cu":10841,"reflecting":10842,"continuously":10843,"cruiser":10844,"graduates":10845,"freeway":10846,"investigated":10847,"ore":10848,"deserved":10849,"maid":10850,"blocking":10851,"phillip":10852,"jorge":10853,"shakes":10854,"dove":10855,"mann":10856,"variables":10857,"lacked":10858,"burden":10859,"accompanying":10860,"que":10861,"consistently":10862,"organizing":10863,"provisional":10864,"complained":10865,"endless":10866,"##rm":10867,"tubes":10868,"juice":10869,"georges":10870,"krishna":10871,"mick":10872,"labels":10873,"thriller":10874,"##uch":10875,"laps":10876,"arcade":10877,"sage":10878,"snail":10879,"##table":10880,"shannon":10881,"fi":10882,"laurence":10883,"seoul":10884,"vacation":10885,"presenting":10886,"hire":10887,"churchill":10888,"surprisingly":10889,"prohibited":10890,"savannah":10891,"technically":10892,"##oli":10893,"170":10894,"##lessly":10895,"testimony":10896,"suited":10897,"speeds":10898,"toys":10899,"romans":10900,"mlb":10901,"flowering":10902,"measurement":10903,"talented":10904,"kay":10905,"settings":10906,"charleston":10907,"expectations":10908,"shattered":10909,"achieving":10910,"triumph":10911,"ceremonies":10912,"portsmouth":10913,"lanes":10914,"mandatory":10915,"loser":10916,"stretching":10917,"cologne":10918,"realizes":10919,"seventy":10920,"cornell":10921,"careers":10922,"webb":10923,"##ulating":10924,"americas":10925,"budapest":10926,"ava":10927,"suspicion":10928,"##ison":10929,"yo":10930,"conrad":10931,"##hai":10932,"sterling":10933,"jessie":10934,"rector":10935,"##az":10936,"1831":10937,"transform":10938,"organize":10939,"loans":10940,"christine":10941,"volcanic":10942,"warrant":10943,"slender":10944,"summers":10945,"subfamily":10946,"newer":10947,"danced":10948,"dynamics":10949,"rhine":10950,"proceeds":10951,"heinrich":10952,"gastropod":10953,"commands":10954,"sings":10955,"facilitate":10956,"easter":10957,"ra":10958,"positioned":10959,"responses":10960,"expense":10961,"fruits":10962,"yanked":10963,"imported":10964,"25th":10965,"velvet":10966,"vic":10967,"primitive":10968,"tribune":10969,"baldwin":10970,"neighbourhood":10971,"donna":10972,"rip":10973,"hay":10974,"pr":10975,"##uro":10976,"1814":10977,"espn":10978,"welcomed":10979,"##aria":10980,"qualifier":10981,"glare":10982,"highland":10983,"timing":10984,"##cted":10985,"shells":10986,"eased":10987,"geometry":10988,"louder":10989,"exciting":10990,"slovakia":10991,"##sion":10992,"##iz":10993,"##lot":10994,"savings":10995,"prairie":10996,"##ques":10997,"marching":10998,"rafael":10999,"tonnes":11000,"##lled":11001,"curtain":11002,"preceding":11003,"shy":11004,"heal":11005,"greene":11006,"worthy":11007,"##pot":11008,"detachment":11009,"bury":11010,"sherman":11011,"##eck":11012,"reinforced":11013,"seeks":11014,"bottles":11015,"contracted":11016,"duchess":11017,"outfit":11018,"walsh":11019,"##sc":11020,"mickey":11021,"##ase":11022,"geoffrey":11023,"archer":11024,"squeeze":11025,"dawson":11026,"eliminate":11027,"invention":11028,"##enberg":11029,"neal":11030,"##eth":11031,"stance":11032,"dealer":11033,"coral":11034,"maple":11035,"retire":11036,"polo":11037,"simplified":11038,"##ht":11039,"1833":11040,"hid":11041,"watts":11042,"backwards":11043,"j
ules":11044,"##oke":11045,"genesis":11046,"mt":11047,"frames":11048,"rebounds":11049,"burma":11050,"woodland":11051,"moist":11052,"santos":11053,"whispers":11054,"drained":11055,"subspecies":11056,"##aa":11057,"streaming":11058,"ulster":11059,"burnt":11060,"correspondence":11061,"maternal":11062,"gerard":11063,"denis":11064,"stealing":11065,"##load":11066,"genius":11067,"duchy":11068,"##oria":11069,"inaugurated":11070,"momentum":11071,"suits":11072,"placement":11073,"sovereign":11074,"clause":11075,"thames":11076,"##hara":11077,"confederation":11078,"reservation":11079,"sketch":11080,"yankees":11081,"lets":11082,"rotten":11083,"charm":11084,"hal":11085,"verses":11086,"ultra":11087,"commercially":11088,"dot":11089,"salon":11090,"citation":11091,"adopt":11092,"winnipeg":11093,"mist":11094,"allocated":11095,"cairo":11096,"##boy":11097,"jenkins":11098,"interference":11099,"objectives":11100,"##wind":11101,"1820":11102,"portfolio":11103,"armoured":11104,"sectors":11105,"##eh":11106,"initiatives":11107,"##world":11108,"integrity":11109,"exercises":11110,"robe":11111,"tap":11112,"ab":11113,"gazed":11114,"##tones":11115,"distracted":11116,"rulers":11117,"111":11118,"favorable":11119,"jerome":11120,"tended":11121,"cart":11122,"factories":11123,"##eri":11124,"diplomat":11125,"valued":11126,"gravel":11127,"charitable":11128,"##try":11129,"calvin":11130,"exploring":11131,"chang":11132,"shepherd":11133,"terrace":11134,"pdf":11135,"pupil":11136,"##ural":11137,"reflects":11138,"ups":11139,"##rch":11140,"governors":11141,"shelf":11142,"depths":11143,"##nberg":11144,"trailed":11145,"crest":11146,"tackle":11147,"##nian":11148,"##ats":11149,"hatred":11150,"##kai":11151,"clare":11152,"makers":11153,"ethiopia":11154,"longtime":11155,"detected":11156,"embedded":11157,"lacking":11158,"slapped":11159,"rely":11160,"thomson":11161,"anticipation":11162,"iso":11163,"morton":11164,"successive":11165,"agnes":11166,"screenwriter":11167,"straightened":11168,"philippe":11169,"playwright":11170,"haunted":11171,"licence":11172,"iris":11173,"intentions":11174,"sutton":11175,"112":11176,"logical":11177,"correctly":11178,"##weight":11179,"branded":11180,"licked":11181,"tipped":11182,"silva":11183,"ricky":11184,"narrator":11185,"requests":11186,"##ents":11187,"greeted":11188,"supernatural":11189,"cow":11190,"##wald":11191,"lung":11192,"refusing":11193,"employer":11194,"strait":11195,"gaelic":11196,"liner":11197,"##piece":11198,"zoe":11199,"sabha":11200,"##mba":11201,"driveway":11202,"harvest":11203,"prints":11204,"bates":11205,"reluctantly":11206,"threshold":11207,"algebra":11208,"ira":11209,"wherever":11210,"coupled":11211,"240":11212,"assumption":11213,"picks":11214,"##air":11215,"designers":11216,"raids":11217,"gentlemen":11218,"##ean":11219,"roller":11220,"blowing":11221,"leipzig":11222,"locks":11223,"screw":11224,"dressing":11225,"strand":11226,"##lings":11227,"scar":11228,"dwarf":11229,"depicts":11230,"##nu":11231,"nods":11232,"##mine":11233,"differ":11234,"boris":11235,"##eur":11236,"yuan":11237,"flip":11238,"##gie":11239,"mob":11240,"invested":11241,"questioning":11242,"applying":11243,"##ture":11244,"shout":11245,"##sel":11246,"gameplay":11247,"blamed":11248,"illustrations":11249,"bothered":11250,"weakness":11251,"rehabilitation":11252,"##of":11253,"##zes":11254,"envelope":11255,"rumors":11256,"miners":11257,"leicester":11258,"subtle":11259,"kerry":11260,"##ico":11261,"ferguson":11262,"##fu":11263,"premiership":11264,"ne":11265,"##cat":11266,"bengali":11267,"prof":11268,"catches":11269,"remnants":11270,"dana":11271,"##r
ily":11272,"shouting":11273,"presidents":11274,"baltic":11275,"ought":11276,"ghosts":11277,"dances":11278,"sailors":11279,"shirley":11280,"fancy":11281,"dominic":11282,"##bie":11283,"madonna":11284,"##rick":11285,"bark":11286,"buttons":11287,"gymnasium":11288,"ashes":11289,"liver":11290,"toby":11291,"oath":11292,"providence":11293,"doyle":11294,"evangelical":11295,"nixon":11296,"cement":11297,"carnegie":11298,"embarked":11299,"hatch":11300,"surroundings":11301,"guarantee":11302,"needing":11303,"pirate":11304,"essence":11305,"##bee":11306,"filter":11307,"crane":11308,"hammond":11309,"projected":11310,"immune":11311,"percy":11312,"twelfth":11313,"##ult":11314,"regent":11315,"doctoral":11316,"damon":11317,"mikhail":11318,"##ichi":11319,"lu":11320,"critically":11321,"elect":11322,"realised":11323,"abortion":11324,"acute":11325,"screening":11326,"mythology":11327,"steadily":11328,"##fc":11329,"frown":11330,"nottingham":11331,"kirk":11332,"wa":11333,"minneapolis":11334,"##rra":11335,"module":11336,"algeria":11337,"mc":11338,"nautical":11339,"encounters":11340,"surprising":11341,"statues":11342,"availability":11343,"shirts":11344,"pie":11345,"alma":11346,"brows":11347,"munster":11348,"mack":11349,"soup":11350,"crater":11351,"tornado":11352,"sanskrit":11353,"cedar":11354,"explosive":11355,"bordered":11356,"dixon":11357,"planets":11358,"stamp":11359,"exam":11360,"happily":11361,"##bble":11362,"carriers":11363,"kidnapped":11364,"##vis":11365,"accommodation":11366,"emigrated":11367,"##met":11368,"knockout":11369,"correspondent":11370,"violation":11371,"profits":11372,"peaks":11373,"lang":11374,"specimen":11375,"agenda":11376,"ancestry":11377,"pottery":11378,"spelling":11379,"equations":11380,"obtaining":11381,"ki":11382,"linking":11383,"1825":11384,"debris":11385,"asylum":11386,"##20":11387,"buddhism":11388,"teddy":11389,"##ants":11390,"gazette":11391,"##nger":11392,"##sse":11393,"dental":11394,"eligibility":11395,"utc":11396,"fathers":11397,"averaged":11398,"zimbabwe":11399,"francesco":11400,"coloured":11401,"hissed":11402,"translator":11403,"lynch":11404,"mandate":11405,"humanities":11406,"mackenzie":11407,"uniforms":11408,"lin":11409,"##iana":11410,"##gio":11411,"asset":11412,"mhz":11413,"fitting":11414,"samantha":11415,"genera":11416,"wei":11417,"rim":11418,"beloved":11419,"shark":11420,"riot":11421,"entities":11422,"expressions":11423,"indo":11424,"carmen":11425,"slipping":11426,"owing":11427,"abbot":11428,"neighbor":11429,"sidney":11430,"##av":11431,"rats":11432,"recommendations":11433,"encouraging":11434,"squadrons":11435,"anticipated":11436,"commanders":11437,"conquered":11438,"##oto":11439,"donations":11440,"diagnosed":11441,"##mond":11442,"divide":11443,"##iva":11444,"guessed":11445,"decoration":11446,"vernon":11447,"auditorium":11448,"revelation":11449,"conversations":11450,"##kers":11451,"##power":11452,"herzegovina":11453,"dash":11454,"alike":11455,"protested":11456,"lateral":11457,"herman":11458,"accredited":11459,"mg":11460,"##gent":11461,"freeman":11462,"mel":11463,"fiji":11464,"crow":11465,"crimson":11466,"##rine":11467,"livestock":11468,"##pped":11469,"humanitarian":11470,"bored":11471,"oz":11472,"whip":11473,"##lene":11474,"##ali":11475,"legitimate":11476,"alter":11477,"grinning":11478,"spelled":11479,"anxious":11480,"oriental":11481,"wesley":11482,"##nin":11483,"##hole":11484,"carnival":11485,"controller":11486,"detect":11487,"##ssa":11488,"bowed":11489,"educator":11490,"kosovo":11491,"macedonia":11492,"##sin":11493,"occupy":11494,"mastering":11495,"stephanie":11496,"janeiro":11497
,"para":11498,"unaware":11499,"nurses":11500,"noon":11501,"135":11502,"cam":11503,"hopefully":11504,"ranger":11505,"combine":11506,"sociology":11507,"polar":11508,"rica":11509,"##eer":11510,"neill":11511,"##sman":11512,"holocaust":11513,"##ip":11514,"doubled":11515,"lust":11516,"1828":11517,"109":11518,"decent":11519,"cooling":11520,"unveiled":11521,"##card":11522,"1829":11523,"nsw":11524,"homer":11525,"chapman":11526,"meyer":11527,"##gin":11528,"dive":11529,"mae":11530,"reagan":11531,"expertise":11532,"##gled":11533,"darwin":11534,"brooke":11535,"sided":11536,"prosecution":11537,"investigating":11538,"comprised":11539,"petroleum":11540,"genres":11541,"reluctant":11542,"differently":11543,"trilogy":11544,"johns":11545,"vegetables":11546,"corpse":11547,"highlighted":11548,"lounge":11549,"pension":11550,"unsuccessfully":11551,"elegant":11552,"aided":11553,"ivory":11554,"beatles":11555,"amelia":11556,"cain":11557,"dubai":11558,"sunny":11559,"immigrant":11560,"babe":11561,"click":11562,"##nder":11563,"underwater":11564,"pepper":11565,"combining":11566,"mumbled":11567,"atlas":11568,"horns":11569,"accessed":11570,"ballad":11571,"physicians":11572,"homeless":11573,"gestured":11574,"rpm":11575,"freak":11576,"louisville":11577,"corporations":11578,"patriots":11579,"prizes":11580,"rational":11581,"warn":11582,"modes":11583,"decorative":11584,"overnight":11585,"din":11586,"troubled":11587,"phantom":11588,"##ort":11589,"monarch":11590,"sheer":11591,"##dorf":11592,"generals":11593,"guidelines":11594,"organs":11595,"addresses":11596,"##zon":11597,"enhance":11598,"curling":11599,"parishes":11600,"cord":11601,"##kie":11602,"linux":11603,"caesar":11604,"deutsche":11605,"bavaria":11606,"##bia":11607,"coleman":11608,"cyclone":11609,"##eria":11610,"bacon":11611,"petty":11612,"##yama":11613,"##old":11614,"hampton":11615,"diagnosis":11616,"1824":11617,"throws":11618,"complexity":11619,"rita":11620,"disputed":11621,"##₃":11622,"pablo":11623,"##sch":11624,"marketed":11625,"trafficking":11626,"##ulus":11627,"examine":11628,"plague":11629,"formats":11630,"##oh":11631,"vault":11632,"faithful":11633,"##bourne":11634,"webster":11635,"##ox":11636,"highlights":11637,"##ient":11638,"##ann":11639,"phones":11640,"vacuum":11641,"sandwich":11642,"modeling":11643,"##gated":11644,"bolivia":11645,"clergy":11646,"qualities":11647,"isabel":11648,"##nas":11649,"##ars":11650,"wears":11651,"screams":11652,"reunited":11653,"annoyed":11654,"bra":11655,"##ancy":11656,"##rate":11657,"differential":11658,"transmitter":11659,"tattoo":11660,"container":11661,"poker":11662,"##och":11663,"excessive":11664,"resides":11665,"cowboys":11666,"##tum":11667,"augustus":11668,"trash":11669,"providers":11670,"statute":11671,"retreated":11672,"balcony":11673,"reversed":11674,"void":11675,"storey":11676,"preceded":11677,"masses":11678,"leap":11679,"laughs":11680,"neighborhoods":11681,"wards":11682,"schemes":11683,"falcon":11684,"santo":11685,"battlefield":11686,"pad":11687,"ronnie":11688,"thread":11689,"lesbian":11690,"venus":11691,"##dian":11692,"beg":11693,"sandstone":11694,"daylight":11695,"punched":11696,"gwen":11697,"analog":11698,"stroked":11699,"wwe":11700,"acceptable":11701,"measurements":11702,"dec":11703,"toxic":11704,"##kel":11705,"adequate":11706,"surgical":11707,"economist":11708,"parameters":11709,"varsity":11710,"##sberg":11711,"quantity":11712,"ella":11713,"##chy":11714,"##rton":11715,"countess":11716,"generating":11717,"precision":11718,"diamonds":11719,"expressway":11720,"ga":11721,"##ı":11722,"1821":11723,"uruguay":11724,"talents":1172
5,"galleries":11726,"expenses":11727,"scanned":11728,"colleague":11729,"outlets":11730,"ryder":11731,"lucien":11732,"##ila":11733,"paramount":11734,"##bon":11735,"syracuse":11736,"dim":11737,"fangs":11738,"gown":11739,"sweep":11740,"##sie":11741,"toyota":11742,"missionaries":11743,"websites":11744,"##nsis":11745,"sentences":11746,"adviser":11747,"val":11748,"trademark":11749,"spells":11750,"##plane":11751,"patience":11752,"starter":11753,"slim":11754,"##borg":11755,"toe":11756,"incredibly":11757,"shoots":11758,"elliot":11759,"nobility":11760,"##wyn":11761,"cowboy":11762,"endorsed":11763,"gardner":11764,"tendency":11765,"persuaded":11766,"organisms":11767,"emissions":11768,"kazakhstan":11769,"amused":11770,"boring":11771,"chips":11772,"themed":11773,"##hand":11774,"llc":11775,"constantinople":11776,"chasing":11777,"systematic":11778,"guatemala":11779,"borrowed":11780,"erin":11781,"carey":11782,"##hard":11783,"highlands":11784,"struggles":11785,"1810":11786,"##ifying":11787,"##ced":11788,"wong":11789,"exceptions":11790,"develops":11791,"enlarged":11792,"kindergarten":11793,"castro":11794,"##ern":11795,"##rina":11796,"leigh":11797,"zombie":11798,"juvenile":11799,"##most":11800,"consul":11801,"##nar":11802,"sailor":11803,"hyde":11804,"clarence":11805,"intensive":11806,"pinned":11807,"nasty":11808,"useless":11809,"jung":11810,"clayton":11811,"stuffed":11812,"exceptional":11813,"ix":11814,"apostolic":11815,"230":11816,"transactions":11817,"##dge":11818,"exempt":11819,"swinging":11820,"cove":11821,"religions":11822,"##ash":11823,"shields":11824,"dairy":11825,"bypass":11826,"190":11827,"pursuing":11828,"bug":11829,"joyce":11830,"bombay":11831,"chassis":11832,"southampton":11833,"chat":11834,"interact":11835,"redesignated":11836,"##pen":11837,"nascar":11838,"pray":11839,"salmon":11840,"rigid":11841,"regained":11842,"malaysian":11843,"grim":11844,"publicity":11845,"constituted":11846,"capturing":11847,"toilet":11848,"delegate":11849,"purely":11850,"tray":11851,"drift":11852,"loosely":11853,"striker":11854,"weakened":11855,"trinidad":11856,"mitch":11857,"itv":11858,"defines":11859,"transmitted":11860,"ming":11861,"scarlet":11862,"nodding":11863,"fitzgerald":11864,"fu":11865,"narrowly":11866,"sp":11867,"tooth":11868,"standings":11869,"virtue":11870,"##₁":11871,"##wara":11872,"##cting":11873,"chateau":11874,"gloves":11875,"lid":11876,"##nel":11877,"hurting":11878,"conservatory":11879,"##pel":11880,"sinclair":11881,"reopened":11882,"sympathy":11883,"nigerian":11884,"strode":11885,"advocated":11886,"optional":11887,"chronic":11888,"discharge":11889,"##rc":11890,"suck":11891,"compatible":11892,"laurel":11893,"stella":11894,"shi":11895,"fails":11896,"wage":11897,"dodge":11898,"128":11899,"informal":11900,"sorts":11901,"levi":11902,"buddha":11903,"villagers":11904,"##aka":11905,"chronicles":11906,"heavier":11907,"summoned":11908,"gateway":11909,"3000":11910,"eleventh":11911,"jewelry":11912,"translations":11913,"accordingly":11914,"seas":11915,"##ency":11916,"fiber":11917,"pyramid":11918,"cubic":11919,"dragging":11920,"##ista":11921,"caring":11922,"##ops":11923,"android":11924,"contacted":11925,"lunar":11926,"##dt":11927,"kai":11928,"lisbon":11929,"patted":11930,"1826":11931,"sacramento":11932,"theft":11933,"madagascar":11934,"subtropical":11935,"disputes":11936,"ta":11937,"holidays":11938,"piper":11939,"willow":11940,"mare":11941,"cane":11942,"itunes":11943,"newfoundland":11944,"benny":11945,"companions":11946,"dong":11947,"raj":11948,"observe":11949,"roar":11950,"charming":11951,"plaque":11952,"tibetan":119
53,"fossils":11954,"enacted":11955,"manning":11956,"bubble":11957,"tina":11958,"tanzania":11959,"##eda":11960,"##hir":11961,"funk":11962,"swamp":11963,"deputies":11964,"cloak":11965,"ufc":11966,"scenario":11967,"par":11968,"scratch":11969,"metals":11970,"anthem":11971,"guru":11972,"engaging":11973,"specially":11974,"##boat":11975,"dialects":11976,"nineteen":11977,"cecil":11978,"duet":11979,"disability":11980,"messenger":11981,"unofficial":11982,"##lies":11983,"defunct":11984,"eds":11985,"moonlight":11986,"drainage":11987,"surname":11988,"puzzle":11989,"honda":11990,"switching":11991,"conservatives":11992,"mammals":11993,"knox":11994,"broadcaster":11995,"sidewalk":11996,"cope":11997,"##ried":11998,"benson":11999,"princes":12000,"peterson":12001,"##sal":12002,"bedford":12003,"sharks":12004,"eli":12005,"wreck":12006,"alberto":12007,"gasp":12008,"archaeology":12009,"lgbt":12010,"teaches":12011,"securities":12012,"madness":12013,"compromise":12014,"waving":12015,"coordination":12016,"davidson":12017,"visions":12018,"leased":12019,"possibilities":12020,"eighty":12021,"jun":12022,"fernandez":12023,"enthusiasm":12024,"assassin":12025,"sponsorship":12026,"reviewer":12027,"kingdoms":12028,"estonian":12029,"laboratories":12030,"##fy":12031,"##nal":12032,"applies":12033,"verb":12034,"celebrations":12035,"##zzo":12036,"rowing":12037,"lightweight":12038,"sadness":12039,"submit":12040,"mvp":12041,"balanced":12042,"dude":12043,"##vas":12044,"explicitly":12045,"metric":12046,"magnificent":12047,"mound":12048,"brett":12049,"mohammad":12050,"mistakes":12051,"irregular":12052,"##hing":12053,"##ass":12054,"sanders":12055,"betrayed":12056,"shipped":12057,"surge":12058,"##enburg":12059,"reporters":12060,"termed":12061,"georg":12062,"pity":12063,"verbal":12064,"bulls":12065,"abbreviated":12066,"enabling":12067,"appealed":12068,"##are":12069,"##atic":12070,"sicily":12071,"sting":12072,"heel":12073,"sweetheart":12074,"bart":12075,"spacecraft":12076,"brutal":12077,"monarchy":12078,"##tter":12079,"aberdeen":12080,"cameo":12081,"diane":12082,"##ub":12083,"survivor":12084,"clyde":12085,"##aries":12086,"complaint":12087,"##makers":12088,"clarinet":12089,"delicious":12090,"chilean":12091,"karnataka":12092,"coordinates":12093,"1818":12094,"panties":12095,"##rst":12096,"pretending":12097,"ar":12098,"dramatically":12099,"kiev":12100,"bella":12101,"tends":12102,"distances":12103,"113":12104,"catalog":12105,"launching":12106,"instances":12107,"telecommunications":12108,"portable":12109,"lindsay":12110,"vatican":12111,"##eim":12112,"angles":12113,"aliens":12114,"marker":12115,"stint":12116,"screens":12117,"bolton":12118,"##rne":12119,"judy":12120,"wool":12121,"benedict":12122,"plasma":12123,"europa":12124,"spark":12125,"imaging":12126,"filmmaker":12127,"swiftly":12128,"##een":12129,"contributor":12130,"##nor":12131,"opted":12132,"stamps":12133,"apologize":12134,"financing":12135,"butter":12136,"gideon":12137,"sophisticated":12138,"alignment":12139,"avery":12140,"chemicals":12141,"yearly":12142,"speculation":12143,"prominence":12144,"professionally":12145,"##ils":12146,"immortal":12147,"institutional":12148,"inception":12149,"wrists":12150,"identifying":12151,"tribunal":12152,"derives":12153,"gains":12154,"##wo":12155,"papal":12156,"preference":12157,"linguistic":12158,"vince":12159,"operative":12160,"brewery":12161,"##ont":12162,"unemployment":12163,"boyd":12164,"##ured":12165,"##outs":12166,"albeit":12167,"prophet":12168,"1813":12169,"bi":12170,"##rr":12171,"##face":12172,"##rad":12173,"quarterly":12174,"asteroid":12175,"clean
ed":12176,"radius":12177,"temper":12178,"##llen":12179,"telugu":12180,"jerk":12181,"viscount":12182,"menu":12183,"##ote":12184,"glimpse":12185,"##aya":12186,"yacht":12187,"hawaiian":12188,"baden":12189,"##rl":12190,"laptop":12191,"readily":12192,"##gu":12193,"monetary":12194,"offshore":12195,"scots":12196,"watches":12197,"##yang":12198,"##arian":12199,"upgrade":12200,"needle":12201,"xbox":12202,"lea":12203,"encyclopedia":12204,"flank":12205,"fingertips":12206,"##pus":12207,"delight":12208,"teachings":12209,"confirm":12210,"roth":12211,"beaches":12212,"midway":12213,"winters":12214,"##iah":12215,"teasing":12216,"daytime":12217,"beverly":12218,"gambling":12219,"bonnie":12220,"##backs":12221,"regulated":12222,"clement":12223,"hermann":12224,"tricks":12225,"knot":12226,"##shing":12227,"##uring":12228,"##vre":12229,"detached":12230,"ecological":12231,"owed":12232,"specialty":12233,"byron":12234,"inventor":12235,"bats":12236,"stays":12237,"screened":12238,"unesco":12239,"midland":12240,"trim":12241,"affection":12242,"##ander":12243,"##rry":12244,"jess":12245,"thoroughly":12246,"feedback":12247,"##uma":12248,"chennai":12249,"strained":12250,"heartbeat":12251,"wrapping":12252,"overtime":12253,"pleaded":12254,"##sworth":12255,"mon":12256,"leisure":12257,"oclc":12258,"##tate":12259,"##ele":12260,"feathers":12261,"angelo":12262,"thirds":12263,"nuts":12264,"surveys":12265,"clever":12266,"gill":12267,"commentator":12268,"##dos":12269,"darren":12270,"rides":12271,"gibraltar":12272,"##nc":12273,"##mu":12274,"dissolution":12275,"dedication":12276,"shin":12277,"meals":12278,"saddle":12279,"elvis":12280,"reds":12281,"chaired":12282,"taller":12283,"appreciation":12284,"functioning":12285,"niece":12286,"favored":12287,"advocacy":12288,"robbie":12289,"criminals":12290,"suffolk":12291,"yugoslav":12292,"passport":12293,"constable":12294,"congressman":12295,"hastings":12296,"vera":12297,"##rov":12298,"consecrated":12299,"sparks":12300,"ecclesiastical":12301,"confined":12302,"##ovich":12303,"muller":12304,"floyd":12305,"nora":12306,"1822":12307,"paved":12308,"1827":12309,"cumberland":12310,"ned":12311,"saga":12312,"spiral":12313,"##flow":12314,"appreciated":12315,"yi":12316,"collaborative":12317,"treating":12318,"similarities":12319,"feminine":12320,"finishes":12321,"##ib":12322,"jade":12323,"import":12324,"##nse":12325,"##hot":12326,"champagne":12327,"mice":12328,"securing":12329,"celebrities":12330,"helsinki":12331,"attributes":12332,"##gos":12333,"cousins":12334,"phases":12335,"ache":12336,"lucia":12337,"gandhi":12338,"submission":12339,"vicar":12340,"spear":12341,"shine":12342,"tasmania":12343,"biting":12344,"detention":12345,"constitute":12346,"tighter":12347,"seasonal":12348,"##gus":12349,"terrestrial":12350,"matthews":12351,"##oka":12352,"effectiveness":12353,"parody":12354,"philharmonic":12355,"##onic":12356,"1816":12357,"strangers":12358,"encoded":12359,"consortium":12360,"guaranteed":12361,"regards":12362,"shifts":12363,"tortured":12364,"collision":12365,"supervisor":12366,"inform":12367,"broader":12368,"insight":12369,"theaters":12370,"armour":12371,"emeritus":12372,"blink":12373,"incorporates":12374,"mapping":12375,"##50":12376,"##ein":12377,"handball":12378,"flexible":12379,"##nta":12380,"substantially":12381,"generous":12382,"thief":12383,"##own":12384,"carr":12385,"loses":12386,"1793":12387,"prose":12388,"ucla":12389,"romeo":12390,"generic":12391,"metallic":12392,"realization":12393,"damages":12394,"mk":12395,"commissioners":12396,"zach":12397,"default":12398,"##ther":12399,"helicopters":12400,"lengt
hy":12401,"stems":12402,"spa":12403,"partnered":12404,"spectators":12405,"rogue":12406,"indication":12407,"penalties":12408,"teresa":12409,"1801":12410,"sen":12411,"##tric":12412,"dalton":12413,"##wich":12414,"irving":12415,"photographic":12416,"##vey":12417,"dell":12418,"deaf":12419,"peters":12420,"excluded":12421,"unsure":12422,"##vable":12423,"patterson":12424,"crawled":12425,"##zio":12426,"resided":12427,"whipped":12428,"latvia":12429,"slower":12430,"ecole":12431,"pipes":12432,"employers":12433,"maharashtra":12434,"comparable":12435,"va":12436,"textile":12437,"pageant":12438,"##gel":12439,"alphabet":12440,"binary":12441,"irrigation":12442,"chartered":12443,"choked":12444,"antoine":12445,"offs":12446,"waking":12447,"supplement":12448,"##wen":12449,"quantities":12450,"demolition":12451,"regain":12452,"locate":12453,"urdu":12454,"folks":12455,"alt":12456,"114":12457,"##mc":12458,"scary":12459,"andreas":12460,"whites":12461,"##ava":12462,"classrooms":12463,"mw":12464,"aesthetic":12465,"publishes":12466,"valleys":12467,"guides":12468,"cubs":12469,"johannes":12470,"bryant":12471,"conventions":12472,"affecting":12473,"##itt":12474,"drain":12475,"awesome":12476,"isolation":12477,"prosecutor":12478,"ambitious":12479,"apology":12480,"captive":12481,"downs":12482,"atmospheric":12483,"lorenzo":12484,"aisle":12485,"beef":12486,"foul":12487,"##onia":12488,"kidding":12489,"composite":12490,"disturbed":12491,"illusion":12492,"natives":12493,"##ffer":12494,"emi":12495,"rockets":12496,"riverside":12497,"wartime":12498,"painters":12499,"adolf":12500,"melted":12501,"##ail":12502,"uncertainty":12503,"simulation":12504,"hawks":12505,"progressed":12506,"meantime":12507,"builder":12508,"spray":12509,"breach":12510,"unhappy":12511,"regina":12512,"russians":12513,"##urg":12514,"determining":12515,"##tation":12516,"tram":12517,"1806":12518,"##quin":12519,"aging":12520,"##12":12521,"1823":12522,"garion":12523,"rented":12524,"mister":12525,"diaz":12526,"terminated":12527,"clip":12528,"1817":12529,"depend":12530,"nervously":12531,"disco":12532,"owe":12533,"defenders":12534,"shiva":12535,"notorious":12536,"disbelief":12537,"shiny":12538,"worcester":12539,"##gation":12540,"##yr":12541,"trailing":12542,"undertook":12543,"islander":12544,"belarus":12545,"limitations":12546,"watershed":12547,"fuller":12548,"overlooking":12549,"utilized":12550,"raphael":12551,"1819":12552,"synthetic":12553,"breakdown":12554,"klein":12555,"##nate":12556,"moaned":12557,"memoir":12558,"lamb":12559,"practicing":12560,"##erly":12561,"cellular":12562,"arrows":12563,"exotic":12564,"##graphy":12565,"witches":12566,"117":12567,"charted":12568,"rey":12569,"hut":12570,"hierarchy":12571,"subdivision":12572,"freshwater":12573,"giuseppe":12574,"aloud":12575,"reyes":12576,"qatar":12577,"marty":12578,"sideways":12579,"utterly":12580,"sexually":12581,"jude":12582,"prayers":12583,"mccarthy":12584,"softball":12585,"blend":12586,"damien":12587,"##gging":12588,"##metric":12589,"wholly":12590,"erupted":12591,"lebanese":12592,"negro":12593,"revenues":12594,"tasted":12595,"comparative":12596,"teamed":12597,"transaction":12598,"labeled":12599,"maori":12600,"sovereignty":12601,"parkway":12602,"trauma":12603,"gran":12604,"malay":12605,"121":12606,"advancement":12607,"descendant":12608,"2020":12609,"buzz":12610,"salvation":12611,"inventory":12612,"symbolic":12613,"##making":12614,"antarctica":12615,"mps":12616,"##gas":12617,"##bro":12618,"mohammed":12619,"myanmar":12620,"holt":12621,"submarines":12622,"tones":12623,"##lman":12624,"locker":12625,"patriarch":12626,"ba
ngkok":12627,"emerson":12628,"remarks":12629,"predators":12630,"kin":12631,"afghan":12632,"confession":12633,"norwich":12634,"rental":12635,"emerge":12636,"advantages":12637,"##zel":12638,"rca":12639,"##hold":12640,"shortened":12641,"storms":12642,"aidan":12643,"##matic":12644,"autonomy":12645,"compliance":12646,"##quet":12647,"dudley":12648,"atp":12649,"##osis":12650,"1803":12651,"motto":12652,"documentation":12653,"summary":12654,"professors":12655,"spectacular":12656,"christina":12657,"archdiocese":12658,"flashing":12659,"innocence":12660,"remake":12661,"##dell":12662,"psychic":12663,"reef":12664,"scare":12665,"employ":12666,"rs":12667,"sticks":12668,"meg":12669,"gus":12670,"leans":12671,"##ude":12672,"accompany":12673,"bergen":12674,"tomas":12675,"##iko":12676,"doom":12677,"wages":12678,"pools":12679,"##nch":12680,"##bes":12681,"breasts":12682,"scholarly":12683,"alison":12684,"outline":12685,"brittany":12686,"breakthrough":12687,"willis":12688,"realistic":12689,"##cut":12690,"##boro":12691,"competitor":12692,"##stan":12693,"pike":12694,"picnic":12695,"icon":12696,"designing":12697,"commercials":12698,"washing":12699,"villain":12700,"skiing":12701,"micro":12702,"costumes":12703,"auburn":12704,"halted":12705,"executives":12706,"##hat":12707,"logistics":12708,"cycles":12709,"vowel":12710,"applicable":12711,"barrett":12712,"exclaimed":12713,"eurovision":12714,"eternity":12715,"ramon":12716,"##umi":12717,"##lls":12718,"modifications":12719,"sweeping":12720,"disgust":12721,"##uck":12722,"torch":12723,"aviv":12724,"ensuring":12725,"rude":12726,"dusty":12727,"sonic":12728,"donovan":12729,"outskirts":12730,"cu":12731,"pathway":12732,"##band":12733,"##gun":12734,"##lines":12735,"disciplines":12736,"acids":12737,"cadet":12738,"paired":12739,"##40":12740,"sketches":12741,"##sive":12742,"marriages":12743,"##⁺":12744,"folding":12745,"peers":12746,"slovak":12747,"implies":12748,"admired":12749,"##beck":12750,"1880s":12751,"leopold":12752,"instinct":12753,"attained":12754,"weston":12755,"megan":12756,"horace":12757,"##ination":12758,"dorsal":12759,"ingredients":12760,"evolutionary":12761,"##its":12762,"complications":12763,"deity":12764,"lethal":12765,"brushing":12766,"levy":12767,"deserted":12768,"institutes":12769,"posthumously":12770,"delivering":12771,"telescope":12772,"coronation":12773,"motivated":12774,"rapids":12775,"luc":12776,"flicked":12777,"pays":12778,"volcano":12779,"tanner":12780,"weighed":12781,"##nica":12782,"crowds":12783,"frankie":12784,"gifted":12785,"addressing":12786,"granddaughter":12787,"winding":12788,"##rna":12789,"constantine":12790,"gomez":12791,"##front":12792,"landscapes":12793,"rudolf":12794,"anthropology":12795,"slate":12796,"werewolf":12797,"##lio":12798,"astronomy":12799,"circa":12800,"rouge":12801,"dreaming":12802,"sack":12803,"knelt":12804,"drowned":12805,"naomi":12806,"prolific":12807,"tracked":12808,"freezing":12809,"herb":12810,"##dium":12811,"agony":12812,"randall":12813,"twisting":12814,"wendy":12815,"deposit":12816,"touches":12817,"vein":12818,"wheeler":12819,"##bbled":12820,"##bor":12821,"batted":12822,"retaining":12823,"tire":12824,"presently":12825,"compare":12826,"specification":12827,"daemon":12828,"nigel":12829,"##grave":12830,"merry":12831,"recommendation":12832,"czechoslovakia":12833,"sandra":12834,"ng":12835,"roma":12836,"##sts":12837,"lambert":12838,"inheritance":12839,"sheikh":12840,"winchester":12841,"cries":12842,"examining":12843,"##yle":12844,"comeback":12845,"cuisine":12846,"nave":12847,"##iv":12848,"ko":12849,"retrieve":12850,"tomatoes":12851,"
barker":12852,"polished":12853,"defining":12854,"irene":12855,"lantern":12856,"personalities":12857,"begging":12858,"tract":12859,"swore":12860,"1809":12861,"175":12862,"##gic":12863,"omaha":12864,"brotherhood":12865,"##rley":12866,"haiti":12867,"##ots":12868,"exeter":12869,"##ete":12870,"##zia":12871,"steele":12872,"dumb":12873,"pearson":12874,"210":12875,"surveyed":12876,"elisabeth":12877,"trends":12878,"##ef":12879,"fritz":12880,"##rf":12881,"premium":12882,"bugs":12883,"fraction":12884,"calmly":12885,"viking":12886,"##birds":12887,"tug":12888,"inserted":12889,"unusually":12890,"##ield":12891,"confronted":12892,"distress":12893,"crashing":12894,"brent":12895,"turks":12896,"resign":12897,"##olo":12898,"cambodia":12899,"gabe":12900,"sauce":12901,"##kal":12902,"evelyn":12903,"116":12904,"extant":12905,"clusters":12906,"quarry":12907,"teenagers":12908,"luna":12909,"##lers":12910,"##ister":12911,"affiliation":12912,"drill":12913,"##ashi":12914,"panthers":12915,"scenic":12916,"libya":12917,"anita":12918,"strengthen":12919,"inscriptions":12920,"##cated":12921,"lace":12922,"sued":12923,"judith":12924,"riots":12925,"##uted":12926,"mint":12927,"##eta":12928,"preparations":12929,"midst":12930,"dub":12931,"challenger":12932,"##vich":12933,"mock":12934,"cf":12935,"displaced":12936,"wicket":12937,"breaths":12938,"enables":12939,"schmidt":12940,"analyst":12941,"##lum":12942,"ag":12943,"highlight":12944,"automotive":12945,"axe":12946,"josef":12947,"newark":12948,"sufficiently":12949,"resembles":12950,"50th":12951,"##pal":12952,"flushed":12953,"mum":12954,"traits":12955,"##ante":12956,"commodore":12957,"incomplete":12958,"warming":12959,"titular":12960,"ceremonial":12961,"ethical":12962,"118":12963,"celebrating":12964,"eighteenth":12965,"cao":12966,"lima":12967,"medalist":12968,"mobility":12969,"strips":12970,"snakes":12971,"##city":12972,"miniature":12973,"zagreb":12974,"barton":12975,"escapes":12976,"umbrella":12977,"automated":12978,"doubted":12979,"differs":12980,"cooled":12981,"georgetown":12982,"dresden":12983,"cooked":12984,"fade":12985,"wyatt":12986,"rna":12987,"jacobs":12988,"carlton":12989,"abundant":12990,"stereo":12991,"boost":12992,"madras":12993,"inning":12994,"##hia":12995,"spur":12996,"ip":12997,"malayalam":12998,"begged":12999,"osaka":13000,"groan":13001,"escaping":13002,"charging":13003,"dose":13004,"vista":13005,"##aj":13006,"bud":13007,"papa":13008,"communists":13009,"advocates":13010,"edged":13011,"tri":13012,"##cent":13013,"resemble":13014,"peaking":13015,"necklace":13016,"fried":13017,"montenegro":13018,"saxony":13019,"goose":13020,"glances":13021,"stuttgart":13022,"curator":13023,"recruit":13024,"grocery":13025,"sympathetic":13026,"##tting":13027,"##fort":13028,"127":13029,"lotus":13030,"randolph":13031,"ancestor":13032,"##rand":13033,"succeeding":13034,"jupiter":13035,"1798":13036,"macedonian":13037,"##heads":13038,"hiking":13039,"1808":13040,"handing":13041,"fischer":13042,"##itive":13043,"garbage":13044,"node":13045,"##pies":13046,"prone":13047,"singular":13048,"papua":13049,"inclined":13050,"attractions":13051,"italia":13052,"pouring":13053,"motioned":13054,"grandma":13055,"garnered":13056,"jacksonville":13057,"corp":13058,"ego":13059,"ringing":13060,"aluminum":13061,"##hausen":13062,"ordering":13063,"##foot":13064,"drawer":13065,"traders":13066,"synagogue":13067,"##play":13068,"##kawa":13069,"resistant":13070,"wandering":13071,"fragile":13072,"fiona":13073,"teased":13074,"var":13075,"hardcore":13076,"soaked":13077,"jubilee":13078,"decisive":13079,"exposition":13080,"mercer":1
3081,"poster":13082,"valencia":13083,"hale":13084,"kuwait":13085,"1811":13086,"##ises":13087,"##wr":13088,"##eed":13089,"tavern":13090,"gamma":13091,"122":13092,"johan":13093,"##uer":13094,"airways":13095,"amino":13096,"gil":13097,"##ury":13098,"vocational":13099,"domains":13100,"torres":13101,"##sp":13102,"generator":13103,"folklore":13104,"outcomes":13105,"##keeper":13106,"canberra":13107,"shooter":13108,"fl":13109,"beams":13110,"confrontation":13111,"##lling":13112,"##gram":13113,"feb":13114,"aligned":13115,"forestry":13116,"pipeline":13117,"jax":13118,"motorway":13119,"conception":13120,"decay":13121,"##tos":13122,"coffin":13123,"##cott":13124,"stalin":13125,"1805":13126,"escorted":13127,"minded":13128,"##nam":13129,"sitcom":13130,"purchasing":13131,"twilight":13132,"veronica":13133,"additions":13134,"passive":13135,"tensions":13136,"straw":13137,"123":13138,"frequencies":13139,"1804":13140,"refugee":13141,"cultivation":13142,"##iate":13143,"christie":13144,"clary":13145,"bulletin":13146,"crept":13147,"disposal":13148,"##rich":13149,"##zong":13150,"processor":13151,"crescent":13152,"##rol":13153,"bmw":13154,"emphasized":13155,"whale":13156,"nazis":13157,"aurora":13158,"##eng":13159,"dwelling":13160,"hauled":13161,"sponsors":13162,"toledo":13163,"mega":13164,"ideology":13165,"theatres":13166,"tessa":13167,"cerambycidae":13168,"saves":13169,"turtle":13170,"cone":13171,"suspects":13172,"kara":13173,"rusty":13174,"yelling":13175,"greeks":13176,"mozart":13177,"shades":13178,"cocked":13179,"participant":13180,"##tro":13181,"shire":13182,"spit":13183,"freeze":13184,"necessity":13185,"##cos":13186,"inmates":13187,"nielsen":13188,"councillors":13189,"loaned":13190,"uncommon":13191,"omar":13192,"peasants":13193,"botanical":13194,"offspring":13195,"daniels":13196,"formations":13197,"jokes":13198,"1794":13199,"pioneers":13200,"sigma":13201,"licensing":13202,"##sus":13203,"wheelchair":13204,"polite":13205,"1807":13206,"liquor":13207,"pratt":13208,"trustee":13209,"##uta":13210,"forewings":13211,"balloon":13212,"##zz":13213,"kilometre":13214,"camping":13215,"explicit":13216,"casually":13217,"shawn":13218,"foolish":13219,"teammates":13220,"nm":13221,"hassan":13222,"carrie":13223,"judged":13224,"satisfy":13225,"vanessa":13226,"knives":13227,"selective":13228,"cnn":13229,"flowed":13230,"##lice":13231,"eclipse":13232,"stressed":13233,"eliza":13234,"mathematician":13235,"cease":13236,"cultivated":13237,"##roy":13238,"commissions":13239,"browns":13240,"##ania":13241,"destroyers":13242,"sheridan":13243,"meadow":13244,"##rius":13245,"minerals":13246,"##cial":13247,"downstream":13248,"clash":13249,"gram":13250,"memoirs":13251,"ventures":13252,"baha":13253,"seymour":13254,"archie":13255,"midlands":13256,"edith":13257,"fare":13258,"flynn":13259,"invite":13260,"canceled":13261,"tiles":13262,"stabbed":13263,"boulder":13264,"incorporate":13265,"amended":13266,"camden":13267,"facial":13268,"mollusk":13269,"unreleased":13270,"descriptions":13271,"yoga":13272,"grabs":13273,"550":13274,"raises":13275,"ramp":13276,"shiver":13277,"##rose":13278,"coined":13279,"pioneering":13280,"tunes":13281,"qing":13282,"warwick":13283,"tops":13284,"119":13285,"melanie":13286,"giles":13287,"##rous":13288,"wandered":13289,"##inal":13290,"annexed":13291,"nov":13292,"30th":13293,"unnamed":13294,"##ished":13295,"organizational":13296,"airplane":13297,"normandy":13298,"stoke":13299,"whistle":13300,"blessing":13301,"violations":13302,"chased":13303,"holders":13304,"shotgun":13305,"##ctic":13306,"outlet":13307,"reactor":13308,"##vik":13309,"ti
res":13310,"tearing":13311,"shores":13312,"fortified":13313,"mascot":13314,"constituencies":13315,"nc":13316,"columnist":13317,"productive":13318,"tibet":13319,"##rta":13320,"lineage":13321,"hooked":13322,"oct":13323,"tapes":13324,"judging":13325,"cody":13326,"##gger":13327,"hansen":13328,"kashmir":13329,"triggered":13330,"##eva":13331,"solved":13332,"cliffs":13333,"##tree":13334,"resisted":13335,"anatomy":13336,"protesters":13337,"transparent":13338,"implied":13339,"##iga":13340,"injection":13341,"mattress":13342,"excluding":13343,"##mbo":13344,"defenses":13345,"helpless":13346,"devotion":13347,"##elli":13348,"growl":13349,"liberals":13350,"weber":13351,"phenomena":13352,"atoms":13353,"plug":13354,"##iff":13355,"mortality":13356,"apprentice":13357,"howe":13358,"convincing":13359,"aaa":13360,"swimmer":13361,"barber":13362,"leone":13363,"promptly":13364,"sodium":13365,"def":13366,"nowadays":13367,"arise":13368,"##oning":13369,"gloucester":13370,"corrected":13371,"dignity":13372,"norm":13373,"erie":13374,"##ders":13375,"elders":13376,"evacuated":13377,"sylvia":13378,"compression":13379,"##yar":13380,"hartford":13381,"pose":13382,"backpack":13383,"reasoning":13384,"accepts":13385,"24th":13386,"wipe":13387,"millimetres":13388,"marcel":13389,"##oda":13390,"dodgers":13391,"albion":13392,"1790":13393,"overwhelmed":13394,"aerospace":13395,"oaks":13396,"1795":13397,"showcase":13398,"acknowledge":13399,"recovering":13400,"nolan":13401,"ashe":13402,"hurts":13403,"geology":13404,"fashioned":13405,"disappearance":13406,"farewell":13407,"swollen":13408,"shrug":13409,"marquis":13410,"wimbledon":13411,"124":13412,"rue":13413,"1792":13414,"commemorate":13415,"reduces":13416,"experiencing":13417,"inevitable":13418,"calcutta":13419,"intel":13420,"##court":13421,"murderer":13422,"sticking":13423,"fisheries":13424,"imagery":13425,"bloom":13426,"280":13427,"brake":13428,"##inus":13429,"gustav":13430,"hesitation":13431,"memorable":13432,"po":13433,"viral":13434,"beans":13435,"accidents":13436,"tunisia":13437,"antenna":13438,"spilled":13439,"consort":13440,"treatments":13441,"aye":13442,"perimeter":13443,"##gard":13444,"donation":13445,"hostage":13446,"migrated":13447,"banker":13448,"addiction":13449,"apex":13450,"lil":13451,"trout":13452,"##ously":13453,"conscience":13454,"##nova":13455,"rams":13456,"sands":13457,"genome":13458,"passionate":13459,"troubles":13460,"##lets":13461,"##set":13462,"amid":13463,"##ibility":13464,"##ret":13465,"higgins":13466,"exceed":13467,"vikings":13468,"##vie":13469,"payne":13470,"##zan":13471,"muscular":13472,"##ste":13473,"defendant":13474,"sucking":13475,"##wal":13476,"ibrahim":13477,"fuselage":13478,"claudia":13479,"vfl":13480,"europeans":13481,"snails":13482,"interval":13483,"##garh":13484,"preparatory":13485,"statewide":13486,"tasked":13487,"lacrosse":13488,"viktor":13489,"##lation":13490,"angola":13491,"##hra":13492,"flint":13493,"implications":13494,"employs":13495,"teens":13496,"patrons":13497,"stall":13498,"weekends":13499,"barriers":13500,"scrambled":13501,"nucleus":13502,"tehran":13503,"jenna":13504,"parsons":13505,"lifelong":13506,"robots":13507,"displacement":13508,"5000":13509,"##bles":13510,"precipitation":13511,"##gt":13512,"knuckles":13513,"clutched":13514,"1802":13515,"marrying":13516,"ecology":13517,"marx":13518,"accusations":13519,"declare":13520,"scars":13521,"kolkata":13522,"mat":13523,"meadows":13524,"bermuda":13525,"skeleton":13526,"finalists":13527,"vintage":13528,"crawl":13529,"coordinate":13530,"affects":13531,"subjected":13532,"orchestral":13533,"mistaken
":13534,"##tc":13535,"mirrors":13536,"dipped":13537,"relied":13538,"260":13539,"arches":13540,"candle":13541,"##nick":13542,"incorporating":13543,"wildly":13544,"fond":13545,"basilica":13546,"owl":13547,"fringe":13548,"rituals":13549,"whispering":13550,"stirred":13551,"feud":13552,"tertiary":13553,"slick":13554,"goat":13555,"honorable":13556,"whereby":13557,"skip":13558,"ricardo":13559,"stripes":13560,"parachute":13561,"adjoining":13562,"submerged":13563,"synthesizer":13564,"##gren":13565,"intend":13566,"positively":13567,"ninety":13568,"phi":13569,"beaver":13570,"partition":13571,"fellows":13572,"alexis":13573,"prohibition":13574,"carlisle":13575,"bizarre":13576,"fraternity":13577,"##bre":13578,"doubts":13579,"icy":13580,"cbc":13581,"aquatic":13582,"sneak":13583,"sonny":13584,"combines":13585,"airports":13586,"crude":13587,"supervised":13588,"spatial":13589,"merge":13590,"alfonso":13591,"##bic":13592,"corrupt":13593,"scan":13594,"undergo":13595,"##ams":13596,"disabilities":13597,"colombian":13598,"comparing":13599,"dolphins":13600,"perkins":13601,"##lish":13602,"reprinted":13603,"unanimous":13604,"bounced":13605,"hairs":13606,"underworld":13607,"midwest":13608,"semester":13609,"bucket":13610,"paperback":13611,"miniseries":13612,"coventry":13613,"demise":13614,"##leigh":13615,"demonstrations":13616,"sensor":13617,"rotating":13618,"yan":13619,"##hler":13620,"arrange":13621,"soils":13622,"##idge":13623,"hyderabad":13624,"labs":13625,"##dr":13626,"brakes":13627,"grandchildren":13628,"##nde":13629,"negotiated":13630,"rover":13631,"ferrari":13632,"continuation":13633,"directorate":13634,"augusta":13635,"stevenson":13636,"counterpart":13637,"gore":13638,"##rda":13639,"nursery":13640,"rican":13641,"ave":13642,"collectively":13643,"broadly":13644,"pastoral":13645,"repertoire":13646,"asserted":13647,"discovering":13648,"nordic":13649,"styled":13650,"fiba":13651,"cunningham":13652,"harley":13653,"middlesex":13654,"survives":13655,"tumor":13656,"tempo":13657,"zack":13658,"aiming":13659,"lok":13660,"urgent":13661,"##rade":13662,"##nto":13663,"devils":13664,"##ement":13665,"contractor":13666,"turin":13667,"##wl":13668,"##ool":13669,"bliss":13670,"repaired":13671,"simmons":13672,"moan":13673,"astronomical":13674,"cr":13675,"negotiate":13676,"lyric":13677,"1890s":13678,"lara":13679,"bred":13680,"clad":13681,"angus":13682,"pbs":13683,"##ience":13684,"engineered":13685,"posed":13686,"##lk":13687,"hernandez":13688,"possessions":13689,"elbows":13690,"psychiatric":13691,"strokes":13692,"confluence":13693,"electorate":13694,"lifts":13695,"campuses":13696,"lava":13697,"alps":13698,"##ep":13699,"##ution":13700,"##date":13701,"physicist":13702,"woody":13703,"##page":13704,"##ographic":13705,"##itis":13706,"juliet":13707,"reformation":13708,"sparhawk":13709,"320":13710,"complement":13711,"suppressed":13712,"jewel":13713,"##½":13714,"floated":13715,"##kas":13716,"continuity":13717,"sadly":13718,"##ische":13719,"inability":13720,"melting":13721,"scanning":13722,"paula":13723,"flour":13724,"judaism":13725,"safer":13726,"vague":13727,"##lm":13728,"solving":13729,"curb":13730,"##stown":13731,"financially":13732,"gable":13733,"bees":13734,"expired":13735,"miserable":13736,"cassidy":13737,"dominion":13738,"1789":13739,"cupped":13740,"145":13741,"robbery":13742,"facto":13743,"amos":13744,"warden":13745,"resume":13746,"tallest":13747,"marvin":13748,"ing":13749,"pounded":13750,"usd":13751,"declaring":13752,"gasoline":13753,"##aux":13754,"darkened":13755,"270":13756,"650":13757,"sophomore":13758,"##mere":13759,"erection":1376
0,"gossip":13761,"televised":13762,"risen":13763,"dial":13764,"##eu":13765,"pillars":13766,"##link":13767,"passages":13768,"profound":13769,"##tina":13770,"arabian":13771,"ashton":13772,"silicon":13773,"nail":13774,"##ead":13775,"##lated":13776,"##wer":13777,"##hardt":13778,"fleming":13779,"firearms":13780,"ducked":13781,"circuits":13782,"blows":13783,"waterloo":13784,"titans":13785,"##lina":13786,"atom":13787,"fireplace":13788,"cheshire":13789,"financed":13790,"activation":13791,"algorithms":13792,"##zzi":13793,"constituent":13794,"catcher":13795,"cherokee":13796,"partnerships":13797,"sexuality":13798,"platoon":13799,"tragic":13800,"vivian":13801,"guarded":13802,"whiskey":13803,"meditation":13804,"poetic":13805,"##late":13806,"##nga":13807,"##ake":13808,"porto":13809,"listeners":13810,"dominance":13811,"kendra":13812,"mona":13813,"chandler":13814,"factions":13815,"22nd":13816,"salisbury":13817,"attitudes":13818,"derivative":13819,"##ido":13820,"##haus":13821,"intake":13822,"paced":13823,"javier":13824,"illustrator":13825,"barrels":13826,"bias":13827,"cockpit":13828,"burnett":13829,"dreamed":13830,"ensuing":13831,"##anda":13832,"receptors":13833,"someday":13834,"hawkins":13835,"mattered":13836,"##lal":13837,"slavic":13838,"1799":13839,"jesuit":13840,"cameroon":13841,"wasted":13842,"tai":13843,"wax":13844,"lowering":13845,"victorious":13846,"freaking":13847,"outright":13848,"hancock":13849,"librarian":13850,"sensing":13851,"bald":13852,"calcium":13853,"myers":13854,"tablet":13855,"announcing":13856,"barack":13857,"shipyard":13858,"pharmaceutical":13859,"##uan":13860,"greenwich":13861,"flush":13862,"medley":13863,"patches":13864,"wolfgang":13865,"pt":13866,"speeches":13867,"acquiring":13868,"exams":13869,"nikolai":13870,"##gg":13871,"hayden":13872,"kannada":13873,"##type":13874,"reilly":13875,"##pt":13876,"waitress":13877,"abdomen":13878,"devastated":13879,"capped":13880,"pseudonym":13881,"pharmacy":13882,"fulfill":13883,"paraguay":13884,"1796":13885,"clicked":13886,"##trom":13887,"archipelago":13888,"syndicated":13889,"##hman":13890,"lumber":13891,"orgasm":13892,"rejection":13893,"clifford":13894,"lorraine":13895,"advent":13896,"mafia":13897,"rodney":13898,"brock":13899,"##ght":13900,"##used":13901,"##elia":13902,"cassette":13903,"chamberlain":13904,"despair":13905,"mongolia":13906,"sensors":13907,"developmental":13908,"upstream":13909,"##eg":13910,"##alis":13911,"spanning":13912,"165":13913,"trombone":13914,"basque":13915,"seeded":13916,"interred":13917,"renewable":13918,"rhys":13919,"leapt":13920,"revision":13921,"molecule":13922,"##ages":13923,"chord":13924,"vicious":13925,"nord":13926,"shivered":13927,"23rd":13928,"arlington":13929,"debts":13930,"corpus":13931,"sunrise":13932,"bays":13933,"blackburn":13934,"centimetres":13935,"##uded":13936,"shuddered":13937,"gm":13938,"strangely":13939,"gripping":13940,"cartoons":13941,"isabelle":13942,"orbital":13943,"##ppa":13944,"seals":13945,"proving":13946,"##lton":13947,"refusal":13948,"strengthened":13949,"bust":13950,"assisting":13951,"baghdad":13952,"batsman":13953,"portrayal":13954,"mara":13955,"pushes":13956,"spears":13957,"og":13958,"##cock":13959,"reside":13960,"nathaniel":13961,"brennan":13962,"1776":13963,"confirmation":13964,"caucus":13965,"##worthy":13966,"markings":13967,"yemen":13968,"nobles":13969,"ku":13970,"lazy":13971,"viewer":13972,"catalan":13973,"encompasses":13974,"sawyer":13975,"##fall":13976,"sparked":13977,"substances":13978,"patents":13979,"braves":13980,"arranger":13981,"evacuation":13982,"sergio":13983,"persuade":13984,"
dover":13985,"tolerance":13986,"penguin":13987,"cum":13988,"jockey":13989,"insufficient":13990,"townships":13991,"occupying":13992,"declining":13993,"plural":13994,"processed":13995,"projection":13996,"puppet":13997,"flanders":13998,"introduces":13999,"liability":14000,"##yon":14001,"gymnastics":14002,"antwerp":14003,"taipei":14004,"hobart":14005,"candles":14006,"jeep":14007,"wes":14008,"observers":14009,"126":14010,"chaplain":14011,"bundle":14012,"glorious":14013,"##hine":14014,"hazel":14015,"flung":14016,"sol":14017,"excavations":14018,"dumped":14019,"stares":14020,"sh":14021,"bangalore":14022,"triangular":14023,"icelandic":14024,"intervals":14025,"expressing":14026,"turbine":14027,"##vers":14028,"songwriting":14029,"crafts":14030,"##igo":14031,"jasmine":14032,"ditch":14033,"rite":14034,"##ways":14035,"entertaining":14036,"comply":14037,"sorrow":14038,"wrestlers":14039,"basel":14040,"emirates":14041,"marian":14042,"rivera":14043,"helpful":14044,"##some":14045,"caution":14046,"downward":14047,"networking":14048,"##atory":14049,"##tered":14050,"darted":14051,"genocide":14052,"emergence":14053,"replies":14054,"specializing":14055,"spokesman":14056,"convenient":14057,"unlocked":14058,"fading":14059,"augustine":14060,"concentrations":14061,"resemblance":14062,"elijah":14063,"investigator":14064,"andhra":14065,"##uda":14066,"promotes":14067,"bean":14068,"##rrell":14069,"fleeing":14070,"wan":14071,"simone":14072,"announcer":14073,"##ame":14074,"##bby":14075,"lydia":14076,"weaver":14077,"132":14078,"residency":14079,"modification":14080,"##fest":14081,"stretches":14082,"##ast":14083,"alternatively":14084,"nat":14085,"lowe":14086,"lacks":14087,"##ented":14088,"pam":14089,"tile":14090,"concealed":14091,"inferior":14092,"abdullah":14093,"residences":14094,"tissues":14095,"vengeance":14096,"##ided":14097,"moisture":14098,"peculiar":14099,"groove":14100,"zip":14101,"bologna":14102,"jennings":14103,"ninja":14104,"oversaw":14105,"zombies":14106,"pumping":14107,"batch":14108,"livingston":14109,"emerald":14110,"installations":14111,"1797":14112,"peel":14113,"nitrogen":14114,"rama":14115,"##fying":14116,"##star":14117,"schooling":14118,"strands":14119,"responding":14120,"werner":14121,"##ost":14122,"lime":14123,"casa":14124,"accurately":14125,"targeting":14126,"##rod":14127,"underway":14128,"##uru":14129,"hemisphere":14130,"lester":14131,"##yard":14132,"occupies":14133,"2d":14134,"griffith":14135,"angrily":14136,"reorganized":14137,"##owing":14138,"courtney":14139,"deposited":14140,"##dd":14141,"##30":14142,"estadio":14143,"##ifies":14144,"dunn":14145,"exiled":14146,"##ying":14147,"checks":14148,"##combe":14149,"##о":14150,"##fly":14151,"successes":14152,"unexpectedly":14153,"blu":14154,"assessed":14155,"##flower":14156,"##ه":14157,"observing":14158,"sacked":14159,"spiders":14160,"kn":14161,"##tail":14162,"mu":14163,"nodes":14164,"prosperity":14165,"audrey":14166,"divisional":14167,"155":14168,"broncos":14169,"tangled":14170,"adjust":14171,"feeds":14172,"erosion":14173,"paolo":14174,"surf":14175,"directory":14176,"snatched":14177,"humid":14178,"admiralty":14179,"screwed":14180,"gt":14181,"reddish":14182,"##nese":14183,"modules":14184,"trench":14185,"lamps":14186,"bind":14187,"leah":14188,"bucks":14189,"competes":14190,"##nz":14191,"##form":14192,"transcription":14193,"##uc":14194,"isles":14195,"violently":14196,"clutching":14197,"pga":14198,"cyclist":14199,"inflation":14200,"flats":14201,"ragged":14202,"unnecessary":14203,"##hian":14204,"stubborn":14205,"coordinated":14206,"harriet":14207,"baba":14208,"dis
qualified":14209,"330":14210,"insect":14211,"wolfe":14212,"##fies":14213,"reinforcements":14214,"rocked":14215,"duel":14216,"winked":14217,"embraced":14218,"bricks":14219,"##raj":14220,"hiatus":14221,"defeats":14222,"pending":14223,"brightly":14224,"jealousy":14225,"##xton":14226,"##hm":14227,"##uki":14228,"lena":14229,"gdp":14230,"colorful":14231,"##dley":14232,"stein":14233,"kidney":14234,"##shu":14235,"underwear":14236,"wanderers":14237,"##haw":14238,"##icus":14239,"guardians":14240,"m³":14241,"roared":14242,"habits":14243,"##wise":14244,"permits":14245,"gp":14246,"uranium":14247,"punished":14248,"disguise":14249,"bundesliga":14250,"elise":14251,"dundee":14252,"erotic":14253,"partisan":14254,"pi":14255,"collectors":14256,"float":14257,"individually":14258,"rendering":14259,"behavioral":14260,"bucharest":14261,"ser":14262,"hare":14263,"valerie":14264,"corporal":14265,"nutrition":14266,"proportional":14267,"##isa":14268,"immense":14269,"##kis":14270,"pavement":14271,"##zie":14272,"##eld":14273,"sutherland":14274,"crouched":14275,"1775":14276,"##lp":14277,"suzuki":14278,"trades":14279,"endurance":14280,"operas":14281,"crosby":14282,"prayed":14283,"priory":14284,"rory":14285,"socially":14286,"##urn":14287,"gujarat":14288,"##pu":14289,"walton":14290,"cube":14291,"pasha":14292,"privilege":14293,"lennon":14294,"floods":14295,"thorne":14296,"waterfall":14297,"nipple":14298,"scouting":14299,"approve":14300,"##lov":14301,"minorities":14302,"voter":14303,"dwight":14304,"extensions":14305,"assure":14306,"ballroom":14307,"slap":14308,"dripping":14309,"privileges":14310,"rejoined":14311,"confessed":14312,"demonstrating":14313,"patriotic":14314,"yell":14315,"investor":14316,"##uth":14317,"pagan":14318,"slumped":14319,"squares":14320,"##cle":14321,"##kins":14322,"confront":14323,"bert":14324,"embarrassment":14325,"##aid":14326,"aston":14327,"urging":14328,"sweater":14329,"starr":14330,"yuri":14331,"brains":14332,"williamson":14333,"commuter":14334,"mortar":14335,"structured":14336,"selfish":14337,"exports":14338,"##jon":14339,"cds":14340,"##him":14341,"unfinished":14342,"##rre":14343,"mortgage":14344,"destinations":14345,"##nagar":14346,"canoe":14347,"solitary":14348,"buchanan":14349,"delays":14350,"magistrate":14351,"fk":14352,"##pling":14353,"motivation":14354,"##lier":14355,"##vier":14356,"recruiting":14357,"assess":14358,"##mouth":14359,"malik":14360,"antique":14361,"1791":14362,"pius":14363,"rahman":14364,"reich":14365,"tub":14366,"zhou":14367,"smashed":14368,"airs":14369,"galway":14370,"xii":14371,"conditioning":14372,"honduras":14373,"discharged":14374,"dexter":14375,"##pf":14376,"lionel":14377,"129":14378,"debates":14379,"lemon":14380,"tiffany":14381,"volunteered":14382,"dom":14383,"dioxide":14384,"procession":14385,"devi":14386,"sic":14387,"tremendous":14388,"advertisements":14389,"colts":14390,"transferring":14391,"verdict":14392,"hanover":14393,"decommissioned":14394,"utter":14395,"relate":14396,"pac":14397,"racism":14398,"##top":14399,"beacon":14400,"limp":14401,"similarity":14402,"terra":14403,"occurrence":14404,"ant":14405,"##how":14406,"becky":14407,"capt":14408,"updates":14409,"armament":14410,"richie":14411,"pal":14412,"##graph":14413,"halloween":14414,"mayo":14415,"##ssen":14416,"##bone":14417,"cara":14418,"serena":14419,"fcc":14420,"dolls":14421,"obligations":14422,"##dling":14423,"violated":14424,"lafayette":14425,"jakarta":14426,"exploitation":14427,"##ime":14428,"infamous":14429,"iconic":14430,"##lah":14431,"##park":14432,"kitty":14433,"moody":14434,"reginald":14435,"dread":14436,"
spill":14437,"crystals":14438,"olivier":14439,"modeled":14440,"bluff":14441,"equilibrium":14442,"separating":14443,"notices":14444,"ordnance":14445,"extinction":14446,"onset":14447,"cosmic":14448,"attachment":14449,"sammy":14450,"expose":14451,"privy":14452,"anchored":14453,"##bil":14454,"abbott":14455,"admits":14456,"bending":14457,"baritone":14458,"emmanuel":14459,"policeman":14460,"vaughan":14461,"winged":14462,"climax":14463,"dresses":14464,"denny":14465,"polytechnic":14466,"mohamed":14467,"burmese":14468,"authentic":14469,"nikki":14470,"genetics":14471,"grandparents":14472,"homestead":14473,"gaza":14474,"postponed":14475,"metacritic":14476,"una":14477,"##sby":14478,"##bat":14479,"unstable":14480,"dissertation":14481,"##rial":14482,"##cian":14483,"curls":14484,"obscure":14485,"uncovered":14486,"bronx":14487,"praying":14488,"disappearing":14489,"##hoe":14490,"prehistoric":14491,"coke":14492,"turret":14493,"mutations":14494,"nonprofit":14495,"pits":14496,"monaco":14497,"##ي":14498,"##usion":14499,"prominently":14500,"dispatched":14501,"podium":14502,"##mir":14503,"uci":14504,"##uation":14505,"133":14506,"fortifications":14507,"birthplace":14508,"kendall":14509,"##lby":14510,"##oll":14511,"preacher":14512,"rack":14513,"goodman":14514,"##rman":14515,"persistent":14516,"##ott":14517,"countless":14518,"jaime":14519,"recorder":14520,"lexington":14521,"persecution":14522,"jumps":14523,"renewal":14524,"wagons":14525,"##11":14526,"crushing":14527,"##holder":14528,"decorations":14529,"##lake":14530,"abundance":14531,"wrath":14532,"laundry":14533,"£1":14534,"garde":14535,"##rp":14536,"jeanne":14537,"beetles":14538,"peasant":14539,"##sl":14540,"splitting":14541,"caste":14542,"sergei":14543,"##rer":14544,"##ema":14545,"scripts":14546,"##ively":14547,"rub":14548,"satellites":14549,"##vor":14550,"inscribed":14551,"verlag":14552,"scrapped":14553,"gale":14554,"packages":14555,"chick":14556,"potato":14557,"slogan":14558,"kathleen":14559,"arabs":14560,"##culture":14561,"counterparts":14562,"reminiscent":14563,"choral":14564,"##tead":14565,"rand":14566,"retains":14567,"bushes":14568,"dane":14569,"accomplish":14570,"courtesy":14571,"closes":14572,"##oth":14573,"slaughter":14574,"hague":14575,"krakow":14576,"lawson":14577,"tailed":14578,"elias":14579,"ginger":14580,"##ttes":14581,"canopy":14582,"betrayal":14583,"rebuilding":14584,"turf":14585,"##hof":14586,"frowning":14587,"allegiance":14588,"brigades":14589,"kicks":14590,"rebuild":14591,"polls":14592,"alias":14593,"nationalism":14594,"td":14595,"rowan":14596,"audition":14597,"bowie":14598,"fortunately":14599,"recognizes":14600,"harp":14601,"dillon":14602,"horrified":14603,"##oro":14604,"renault":14605,"##tics":14606,"ropes":14607,"##α":14608,"presumed":14609,"rewarded":14610,"infrared":14611,"wiping":14612,"accelerated":14613,"illustration":14614,"##rid":14615,"presses":14616,"practitioners":14617,"badminton":14618,"##iard":14619,"detained":14620,"##tera":14621,"recognizing":14622,"relates":14623,"misery":14624,"##sies":14625,"##tly":14626,"reproduction":14627,"piercing":14628,"potatoes":14629,"thornton":14630,"esther":14631,"manners":14632,"hbo":14633,"##aan":14634,"ours":14635,"bullshit":14636,"ernie":14637,"perennial":14638,"sensitivity":14639,"illuminated":14640,"rupert":14641,"##jin":14642,"##iss":14643,"##ear":14644,"rfc":14645,"nassau":14646,"##dock":14647,"staggered":14648,"socialism":14649,"##haven":14650,"appointments":14651,"nonsense":14652,"prestige":14653,"sharma":14654,"haul":14655,"##tical":14656,"solidarity":14657,"gps":14658,"##ook":14659,"#
#rata":14660,"igor":14661,"pedestrian":14662,"##uit":14663,"baxter":14664,"tenants":14665,"wires":14666,"medication":14667,"unlimited":14668,"guiding":14669,"impacts":14670,"diabetes":14671,"##rama":14672,"sasha":14673,"pas":14674,"clive":14675,"extraction":14676,"131":14677,"continually":14678,"constraints":14679,"##bilities":14680,"sonata":14681,"hunted":14682,"sixteenth":14683,"chu":14684,"planting":14685,"quote":14686,"mayer":14687,"pretended":14688,"abs":14689,"spat":14690,"##hua":14691,"ceramic":14692,"##cci":14693,"curtains":14694,"pigs":14695,"pitching":14696,"##dad":14697,"latvian":14698,"sore":14699,"dayton":14700,"##sted":14701,"##qi":14702,"patrols":14703,"slice":14704,"playground":14705,"##nted":14706,"shone":14707,"stool":14708,"apparatus":14709,"inadequate":14710,"mates":14711,"treason":14712,"##ija":14713,"desires":14714,"##liga":14715,"##croft":14716,"somalia":14717,"laurent":14718,"mir":14719,"leonardo":14720,"oracle":14721,"grape":14722,"obliged":14723,"chevrolet":14724,"thirteenth":14725,"stunning":14726,"enthusiastic":14727,"##ede":14728,"accounted":14729,"concludes":14730,"currents":14731,"basil":14732,"##kovic":14733,"drought":14734,"##rica":14735,"mai":14736,"##aire":14737,"shove":14738,"posting":14739,"##shed":14740,"pilgrimage":14741,"humorous":14742,"packing":14743,"fry":14744,"pencil":14745,"wines":14746,"smells":14747,"144":14748,"marilyn":14749,"aching":14750,"newest":14751,"clung":14752,"bon":14753,"neighbours":14754,"sanctioned":14755,"##pie":14756,"mug":14757,"##stock":14758,"drowning":14759,"##mma":14760,"hydraulic":14761,"##vil":14762,"hiring":14763,"reminder":14764,"lilly":14765,"investigators":14766,"##ncies":14767,"sour":14768,"##eous":14769,"compulsory":14770,"packet":14771,"##rion":14772,"##graphic":14773,"##elle":14774,"cannes":14775,"##inate":14776,"depressed":14777,"##rit":14778,"heroic":14779,"importantly":14780,"theresa":14781,"##tled":14782,"conway":14783,"saturn":14784,"marginal":14785,"rae":14786,"##xia":14787,"corresponds":14788,"royce":14789,"pact":14790,"jasper":14791,"explosives":14792,"packaging":14793,"aluminium":14794,"##ttered":14795,"denotes":14796,"rhythmic":14797,"spans":14798,"assignments":14799,"hereditary":14800,"outlined":14801,"originating":14802,"sundays":14803,"lad":14804,"reissued":14805,"greeting":14806,"beatrice":14807,"##dic":14808,"pillar":14809,"marcos":14810,"plots":14811,"handbook":14812,"alcoholic":14813,"judiciary":14814,"avant":14815,"slides":14816,"extract":14817,"masculine":14818,"blur":14819,"##eum":14820,"##force":14821,"homage":14822,"trembled":14823,"owens":14824,"hymn":14825,"trey":14826,"omega":14827,"signaling":14828,"socks":14829,"accumulated":14830,"reacted":14831,"attic":14832,"theo":14833,"lining":14834,"angie":14835,"distraction":14836,"primera":14837,"talbot":14838,"##key":14839,"1200":14840,"ti":14841,"creativity":14842,"billed":14843,"##hey":14844,"deacon":14845,"eduardo":14846,"identifies":14847,"proposition":14848,"dizzy":14849,"gunner":14850,"hogan":14851,"##yam":14852,"##pping":14853,"##hol":14854,"ja":14855,"##chan":14856,"jensen":14857,"reconstructed":14858,"##berger":14859,"clearance":14860,"darius":14861,"##nier":14862,"abe":14863,"harlem":14864,"plea":14865,"dei":14866,"circled":14867,"emotionally":14868,"notation":14869,"fascist":14870,"neville":14871,"exceeded":14872,"upwards":14873,"viable":14874,"ducks":14875,"##fo":14876,"workforce":14877,"racer":14878,"limiting":14879,"shri":14880,"##lson":14881,"possesses":14882,"1600":14883,"kerr":14884,"moths":14885,"devastating":14886,"laden":148
87,"disturbing":14888,"locking":14889,"##cture":14890,"gal":14891,"fearing":14892,"accreditation":14893,"flavor":14894,"aide":14895,"1870s":14896,"mountainous":14897,"##baum":14898,"melt":14899,"##ures":14900,"motel":14901,"texture":14902,"servers":14903,"soda":14904,"##mb":14905,"herd":14906,"##nium":14907,"erect":14908,"puzzled":14909,"hum":14910,"peggy":14911,"examinations":14912,"gould":14913,"testified":14914,"geoff":14915,"ren":14916,"devised":14917,"sacks":14918,"##law":14919,"denial":14920,"posters":14921,"grunted":14922,"cesar":14923,"tutor":14924,"ec":14925,"gerry":14926,"offerings":14927,"byrne":14928,"falcons":14929,"combinations":14930,"ct":14931,"incoming":14932,"pardon":14933,"rocking":14934,"26th":14935,"avengers":14936,"flared":14937,"mankind":14938,"seller":14939,"uttar":14940,"loch":14941,"nadia":14942,"stroking":14943,"exposing":14944,"##hd":14945,"fertile":14946,"ancestral":14947,"instituted":14948,"##has":14949,"noises":14950,"prophecy":14951,"taxation":14952,"eminent":14953,"vivid":14954,"pol":14955,"##bol":14956,"dart":14957,"indirect":14958,"multimedia":14959,"notebook":14960,"upside":14961,"displaying":14962,"adrenaline":14963,"referenced":14964,"geometric":14965,"##iving":14966,"progression":14967,"##ddy":14968,"blunt":14969,"announce":14970,"##far":14971,"implementing":14972,"##lav":14973,"aggression":14974,"liaison":14975,"cooler":14976,"cares":14977,"headache":14978,"plantations":14979,"gorge":14980,"dots":14981,"impulse":14982,"thickness":14983,"ashamed":14984,"averaging":14985,"kathy":14986,"obligation":14987,"precursor":14988,"137":14989,"fowler":14990,"symmetry":14991,"thee":14992,"225":14993,"hears":14994,"##rai":14995,"undergoing":14996,"ads":14997,"butcher":14998,"bowler":14999,"##lip":15000,"cigarettes":15001,"subscription":15002,"goodness":15003,"##ically":15004,"browne":15005,"##hos":15006,"##tech":15007,"kyoto":15008,"donor":15009,"##erty":15010,"damaging":15011,"friction":15012,"drifting":15013,"expeditions":15014,"hardened":15015,"prostitution":15016,"152":15017,"fauna":15018,"blankets":15019,"claw":15020,"tossing":15021,"snarled":15022,"butterflies":15023,"recruits":15024,"investigative":15025,"coated":15026,"healed":15027,"138":15028,"communal":15029,"hai":15030,"xiii":15031,"academics":15032,"boone":15033,"psychologist":15034,"restless":15035,"lahore":15036,"stephens":15037,"mba":15038,"brendan":15039,"foreigners":15040,"printer":15041,"##pc":15042,"ached":15043,"explode":15044,"27th":15045,"deed":15046,"scratched":15047,"dared":15048,"##pole":15049,"cardiac":15050,"1780":15051,"okinawa":15052,"proto":15053,"commando":15054,"compelled":15055,"oddly":15056,"electrons":15057,"##base":15058,"replica":15059,"thanksgiving":15060,"##rist":15061,"sheila":15062,"deliberate":15063,"stafford":15064,"tidal":15065,"representations":15066,"hercules":15067,"ou":15068,"##path":15069,"##iated":15070,"kidnapping":15071,"lenses":15072,"##tling":15073,"deficit":15074,"samoa":15075,"mouths":15076,"consuming":15077,"computational":15078,"maze":15079,"granting":15080,"smirk":15081,"razor":15082,"fixture":15083,"ideals":15084,"inviting":15085,"aiden":15086,"nominal":15087,"##vs":15088,"issuing":15089,"julio":15090,"pitt":15091,"ramsey":15092,"docks":15093,"##oss":15094,"exhaust":15095,"##owed":15096,"bavarian":15097,"draped":15098,"anterior":15099,"mating":15100,"ethiopian":15101,"explores":15102,"noticing":15103,"##nton":15104,"discarded":15105,"convenience":15106,"hoffman":15107,"endowment":15108,"beasts":15109,"cartridge":15110,"mormon":15111,"paternal":15112,"prob
e":15113,"sleeves":15114,"interfere":15115,"lump":15116,"deadline":15117,"##rail":15118,"jenks":15119,"bulldogs":15120,"scrap":15121,"alternating":15122,"justified":15123,"reproductive":15124,"nam":15125,"seize":15126,"descending":15127,"secretariat":15128,"kirby":15129,"coupe":15130,"grouped":15131,"smash":15132,"panther":15133,"sedan":15134,"tapping":15135,"##18":15136,"lola":15137,"cheer":15138,"germanic":15139,"unfortunate":15140,"##eter":15141,"unrelated":15142,"##fan":15143,"subordinate":15144,"##sdale":15145,"suzanne":15146,"advertisement":15147,"##ility":15148,"horsepower":15149,"##lda":15150,"cautiously":15151,"discourse":15152,"luigi":15153,"##mans":15154,"##fields":15155,"noun":15156,"prevalent":15157,"mao":15158,"schneider":15159,"everett":15160,"surround":15161,"governorate":15162,"kira":15163,"##avia":15164,"westward":15165,"##take":15166,"misty":15167,"rails":15168,"sustainability":15169,"134":15170,"unused":15171,"##rating":15172,"packs":15173,"toast":15174,"unwilling":15175,"regulate":15176,"thy":15177,"suffrage":15178,"nile":15179,"awe":15180,"assam":15181,"definitions":15182,"travelers":15183,"affordable":15184,"##rb":15185,"conferred":15186,"sells":15187,"undefeated":15188,"beneficial":15189,"torso":15190,"basal":15191,"repeating":15192,"remixes":15193,"##pass":15194,"bahrain":15195,"cables":15196,"fang":15197,"##itated":15198,"excavated":15199,"numbering":15200,"statutory":15201,"##rey":15202,"deluxe":15203,"##lian":15204,"forested":15205,"ramirez":15206,"derbyshire":15207,"zeus":15208,"slamming":15209,"transfers":15210,"astronomer":15211,"banana":15212,"lottery":15213,"berg":15214,"histories":15215,"bamboo":15216,"##uchi":15217,"resurrection":15218,"posterior":15219,"bowls":15220,"vaguely":15221,"##thi":15222,"thou":15223,"preserving":15224,"tensed":15225,"offence":15226,"##inas":15227,"meyrick":15228,"callum":15229,"ridden":15230,"watt":15231,"langdon":15232,"tying":15233,"lowland":15234,"snorted":15235,"daring":15236,"truman":15237,"##hale":15238,"##girl":15239,"aura":15240,"overly":15241,"filing":15242,"weighing":15243,"goa":15244,"infections":15245,"philanthropist":15246,"saunders":15247,"eponymous":15248,"##owski":15249,"latitude":15250,"perspectives":15251,"reviewing":15252,"mets":15253,"commandant":15254,"radial":15255,"##kha":15256,"flashlight":15257,"reliability":15258,"koch":15259,"vowels":15260,"amazed":15261,"ada":15262,"elaine":15263,"supper":15264,"##rth":15265,"##encies":15266,"predator":15267,"debated":15268,"soviets":15269,"cola":15270,"##boards":15271,"##nah":15272,"compartment":15273,"crooked":15274,"arbitrary":15275,"fourteenth":15276,"##ctive":15277,"havana":15278,"majors":15279,"steelers":15280,"clips":15281,"profitable":15282,"ambush":15283,"exited":15284,"packers":15285,"##tile":15286,"nude":15287,"cracks":15288,"fungi":15289,"##е":15290,"limb":15291,"trousers":15292,"josie":15293,"shelby":15294,"tens":15295,"frederic":15296,"##ος":15297,"definite":15298,"smoothly":15299,"constellation":15300,"insult":15301,"baton":15302,"discs":15303,"lingering":15304,"##nco":15305,"conclusions":15306,"lent":15307,"staging":15308,"becker":15309,"grandpa":15310,"shaky":15311,"##tron":15312,"einstein":15313,"obstacles":15314,"sk":15315,"adverse":15316,"elle":15317,"economically":15318,"##moto":15319,"mccartney":15320,"thor":15321,"dismissal":15322,"motions":15323,"readings":15324,"nostrils":15325,"treatise":15326,"##pace":15327,"squeezing":15328,"evidently":15329,"prolonged":15330,"1783":15331,"venezuelan":15332,"je":15333,"marguerite":15334,"beirut":15335,"takeo
ver":15336,"shareholders":15337,"##vent":15338,"denise":15339,"digit":15340,"airplay":15341,"norse":15342,"##bbling":15343,"imaginary":15344,"pills":15345,"hubert":15346,"blaze":15347,"vacated":15348,"eliminating":15349,"##ello":15350,"vine":15351,"mansfield":15352,"##tty":15353,"retrospective":15354,"barrow":15355,"borne":15356,"clutch":15357,"bail":15358,"forensic":15359,"weaving":15360,"##nett":15361,"##witz":15362,"desktop":15363,"citadel":15364,"promotions":15365,"worrying":15366,"dorset":15367,"ieee":15368,"subdivided":15369,"##iating":15370,"manned":15371,"expeditionary":15372,"pickup":15373,"synod":15374,"chuckle":15375,"185":15376,"barney":15377,"##rz":15378,"##ffin":15379,"functionality":15380,"karachi":15381,"litigation":15382,"meanings":15383,"uc":15384,"lick":15385,"turbo":15386,"anders":15387,"##ffed":15388,"execute":15389,"curl":15390,"oppose":15391,"ankles":15392,"typhoon":15393,"##د":15394,"##ache":15395,"##asia":15396,"linguistics":15397,"compassion":15398,"pressures":15399,"grazing":15400,"perfection":15401,"##iting":15402,"immunity":15403,"monopoly":15404,"muddy":15405,"backgrounds":15406,"136":15407,"namibia":15408,"francesca":15409,"monitors":15410,"attracting":15411,"stunt":15412,"tuition":15413,"##ии":15414,"vegetable":15415,"##mates":15416,"##quent":15417,"mgm":15418,"jen":15419,"complexes":15420,"forts":15421,"##ond":15422,"cellar":15423,"bites":15424,"seventeenth":15425,"royals":15426,"flemish":15427,"failures":15428,"mast":15429,"charities":15430,"##cular":15431,"peruvian":15432,"capitals":15433,"macmillan":15434,"ipswich":15435,"outward":15436,"frigate":15437,"postgraduate":15438,"folds":15439,"employing":15440,"##ouse":15441,"concurrently":15442,"fiery":15443,"##tai":15444,"contingent":15445,"nightmares":15446,"monumental":15447,"nicaragua":15448,"##kowski":15449,"lizard":15450,"mal":15451,"fielding":15452,"gig":15453,"reject":15454,"##pad":15455,"harding":15456,"##ipe":15457,"coastline":15458,"##cin":15459,"##nos":15460,"beethoven":15461,"humphrey":15462,"innovations":15463,"##tam":15464,"##nge":15465,"norris":15466,"doris":15467,"solicitor":15468,"huang":15469,"obey":15470,"141":15471,"##lc":15472,"niagara":15473,"##tton":15474,"shelves":15475,"aug":15476,"bourbon":15477,"curry":15478,"nightclub":15479,"specifications":15480,"hilton":15481,"##ndo":15482,"centennial":15483,"dispersed":15484,"worm":15485,"neglected":15486,"briggs":15487,"sm":15488,"font":15489,"kuala":15490,"uneasy":15491,"plc":15492,"##nstein":15493,"##bound":15494,"##aking":15495,"##burgh":15496,"awaiting":15497,"pronunciation":15498,"##bbed":15499,"##quest":15500,"eh":15501,"optimal":15502,"zhu":15503,"raped":15504,"greens":15505,"presided":15506,"brenda":15507,"worries":15508,"##life":15509,"venetian":15510,"marxist":15511,"turnout":15512,"##lius":15513,"refined":15514,"braced":15515,"sins":15516,"grasped":15517,"sunderland":15518,"nickel":15519,"speculated":15520,"lowell":15521,"cyrillic":15522,"communism":15523,"fundraising":15524,"resembling":15525,"colonists":15526,"mutant":15527,"freddie":15528,"usc":15529,"##mos":15530,"gratitude":15531,"##run":15532,"mural":15533,"##lous":15534,"chemist":15535,"wi":15536,"reminds":15537,"28th":15538,"steals":15539,"tess":15540,"pietro":15541,"##ingen":15542,"promoter":15543,"ri":15544,"microphone":15545,"honoured":15546,"rai":15547,"sant":15548,"##qui":15549,"feather":15550,"##nson":15551,"burlington":15552,"kurdish":15553,"terrorists":15554,"deborah":15555,"sickness":15556,"##wed":15557,"##eet":15558,"hazard":15559,"irritated":15560,"desperation":15
561,"veil":15562,"clarity":15563,"##rik":15564,"jewels":15565,"xv":15566,"##gged":15567,"##ows":15568,"##cup":15569,"berkshire":15570,"unfair":15571,"mysteries":15572,"orchid":15573,"winced":15574,"exhaustion":15575,"renovations":15576,"stranded":15577,"obe":15578,"infinity":15579,"##nies":15580,"adapt":15581,"redevelopment":15582,"thanked":15583,"registry":15584,"olga":15585,"domingo":15586,"noir":15587,"tudor":15588,"ole":15589,"##atus":15590,"commenting":15591,"behaviors":15592,"##ais":15593,"crisp":15594,"pauline":15595,"probable":15596,"stirling":15597,"wigan":15598,"##bian":15599,"paralympics":15600,"panting":15601,"surpassed":15602,"##rew":15603,"luca":15604,"barred":15605,"pony":15606,"famed":15607,"##sters":15608,"cassandra":15609,"waiter":15610,"carolyn":15611,"exported":15612,"##orted":15613,"andres":15614,"destructive":15615,"deeds":15616,"jonah":15617,"castles":15618,"vacancy":15619,"suv":15620,"##glass":15621,"1788":15622,"orchard":15623,"yep":15624,"famine":15625,"belarusian":15626,"sprang":15627,"##forth":15628,"skinny":15629,"##mis":15630,"administrators":15631,"rotterdam":15632,"zambia":15633,"zhao":15634,"boiler":15635,"discoveries":15636,"##ride":15637,"##physics":15638,"lucius":15639,"disappointing":15640,"outreach":15641,"spoon":15642,"##frame":15643,"qualifications":15644,"unanimously":15645,"enjoys":15646,"regency":15647,"##iidae":15648,"stade":15649,"realism":15650,"veterinary":15651,"rodgers":15652,"dump":15653,"alain":15654,"chestnut":15655,"castile":15656,"censorship":15657,"rumble":15658,"gibbs":15659,"##itor":15660,"communion":15661,"reggae":15662,"inactivated":15663,"logs":15664,"loads":15665,"##houses":15666,"homosexual":15667,"##iano":15668,"ale":15669,"informs":15670,"##cas":15671,"phrases":15672,"plaster":15673,"linebacker":15674,"ambrose":15675,"kaiser":15676,"fascinated":15677,"850":15678,"limerick":15679,"recruitment":15680,"forge":15681,"mastered":15682,"##nding":15683,"leinster":15684,"rooted":15685,"threaten":15686,"##strom":15687,"borneo":15688,"##hes":15689,"suggestions":15690,"scholarships":15691,"propeller":15692,"documentaries":15693,"patronage":15694,"coats":15695,"constructing":15696,"invest":15697,"neurons":15698,"comet":15699,"entirety":15700,"shouts":15701,"identities":15702,"annoying":15703,"unchanged":15704,"wary":15705,"##antly":15706,"##ogy":15707,"neat":15708,"oversight":15709,"##kos":15710,"phillies":15711,"replay":15712,"constance":15713,"##kka":15714,"incarnation":15715,"humble":15716,"skies":15717,"minus":15718,"##acy":15719,"smithsonian":15720,"##chel":15721,"guerrilla":15722,"jar":15723,"cadets":15724,"##plate":15725,"surplus":15726,"audit":15727,"##aru":15728,"cracking":15729,"joanna":15730,"louisa":15731,"pacing":15732,"##lights":15733,"intentionally":15734,"##iri":15735,"diner":15736,"nwa":15737,"imprint":15738,"australians":15739,"tong":15740,"unprecedented":15741,"bunker":15742,"naive":15743,"specialists":15744,"ark":15745,"nichols":15746,"railing":15747,"leaked":15748,"pedal":15749,"##uka":15750,"shrub":15751,"longing":15752,"roofs":15753,"v8":15754,"captains":15755,"neural":15756,"tuned":15757,"##ntal":15758,"##jet":15759,"emission":15760,"medina":15761,"frantic":15762,"codex":15763,"definitive":15764,"sid":15765,"abolition":15766,"intensified":15767,"stocks":15768,"enrique":15769,"sustain":15770,"genoa":15771,"oxide":15772,"##written":15773,"clues":15774,"cha":15775,"##gers":15776,"tributaries":15777,"fragment":15778,"venom":15779,"##rity":15780,"##ente":15781,"##sca":15782,"muffled":15783,"vain":15784,"sire":15785,"laos"
:15786,"##ingly":15787,"##hana":15788,"hastily":15789,"snapping":15790,"surfaced":15791,"sentiment":15792,"motive":15793,"##oft":15794,"contests":15795,"approximate":15796,"mesa":15797,"luckily":15798,"dinosaur":15799,"exchanges":15800,"propelled":15801,"accord":15802,"bourne":15803,"relieve":15804,"tow":15805,"masks":15806,"offended":15807,"##ues":15808,"cynthia":15809,"##mmer":15810,"rains":15811,"bartender":15812,"zinc":15813,"reviewers":15814,"lois":15815,"##sai":15816,"legged":15817,"arrogant":15818,"rafe":15819,"rosie":15820,"comprise":15821,"handicap":15822,"blockade":15823,"inlet":15824,"lagoon":15825,"copied":15826,"drilling":15827,"shelley":15828,"petals":15829,"##inian":15830,"mandarin":15831,"obsolete":15832,"##inated":15833,"onward":15834,"arguably":15835,"productivity":15836,"cindy":15837,"praising":15838,"seldom":15839,"busch":15840,"discusses":15841,"raleigh":15842,"shortage":15843,"ranged":15844,"stanton":15845,"encouragement":15846,"firstly":15847,"conceded":15848,"overs":15849,"temporal":15850,"##uke":15851,"cbe":15852,"##bos":15853,"woo":15854,"certainty":15855,"pumps":15856,"##pton":15857,"stalked":15858,"##uli":15859,"lizzie":15860,"periodic":15861,"thieves":15862,"weaker":15863,"##night":15864,"gases":15865,"shoving":15866,"chooses":15867,"wc":15868,"##chemical":15869,"prompting":15870,"weights":15871,"##kill":15872,"robust":15873,"flanked":15874,"sticky":15875,"hu":15876,"tuberculosis":15877,"##eb":15878,"##eal":15879,"christchurch":15880,"resembled":15881,"wallet":15882,"reese":15883,"inappropriate":15884,"pictured":15885,"distract":15886,"fixing":15887,"fiddle":15888,"giggled":15889,"burger":15890,"heirs":15891,"hairy":15892,"mechanic":15893,"torque":15894,"apache":15895,"obsessed":15896,"chiefly":15897,"cheng":15898,"logging":15899,"##tag":15900,"extracted":15901,"meaningful":15902,"numb":15903,"##vsky":15904,"gloucestershire":15905,"reminding":15906,"##bay":15907,"unite":15908,"##lit":15909,"breeds":15910,"diminished":15911,"clown":15912,"glove":15913,"1860s":15914,"##ن":15915,"##ug":15916,"archibald":15917,"focal":15918,"freelance":15919,"sliced":15920,"depiction":15921,"##yk":15922,"organism":15923,"switches":15924,"sights":15925,"stray":15926,"crawling":15927,"##ril":15928,"lever":15929,"leningrad":15930,"interpretations":15931,"loops":15932,"anytime":15933,"reel":15934,"alicia":15935,"delighted":15936,"##ech":15937,"inhaled":15938,"xiv":15939,"suitcase":15940,"bernie":15941,"vega":15942,"licenses":15943,"northampton":15944,"exclusion":15945,"induction":15946,"monasteries":15947,"racecourse":15948,"homosexuality":15949,"##right":15950,"##sfield":15951,"##rky":15952,"dimitri":15953,"michele":15954,"alternatives":15955,"ions":15956,"commentators":15957,"genuinely":15958,"objected":15959,"pork":15960,"hospitality":15961,"fencing":15962,"stephan":15963,"warships":15964,"peripheral":15965,"wit":15966,"drunken":15967,"wrinkled":15968,"quentin":15969,"spends":15970,"departing":15971,"chung":15972,"numerical":15973,"spokesperson":15974,"##zone":15975,"johannesburg":15976,"caliber":15977,"killers":15978,"##udge":15979,"assumes":15980,"neatly":15981,"demographic":15982,"abigail":15983,"bloc":15984,"##vel":15985,"mounting":15986,"##lain":15987,"bentley":15988,"slightest":15989,"xu":15990,"recipients":15991,"##jk":15992,"merlin":15993,"##writer":15994,"seniors":15995,"prisons":15996,"blinking":15997,"hindwings":15998,"flickered":15999,"kappa":16000,"##hel":16001,"80s":16002,"strengthening":16003,"appealing":16004,"brewing":16005,"gypsy":16006,"mali":16007,"lashes":16008,"
hulk":16009,"unpleasant":16010,"harassment":16011,"bio":16012,"treaties":16013,"predict":16014,"instrumentation":16015,"pulp":16016,"troupe":16017,"boiling":16018,"mantle":16019,"##ffe":16020,"ins":16021,"##vn":16022,"dividing":16023,"handles":16024,"verbs":16025,"##onal":16026,"coconut":16027,"senegal":16028,"340":16029,"thorough":16030,"gum":16031,"momentarily":16032,"##sto":16033,"cocaine":16034,"panicked":16035,"destined":16036,"##turing":16037,"teatro":16038,"denying":16039,"weary":16040,"captained":16041,"mans":16042,"##hawks":16043,"##code":16044,"wakefield":16045,"bollywood":16046,"thankfully":16047,"##16":16048,"cyril":16049,"##wu":16050,"amendments":16051,"##bahn":16052,"consultation":16053,"stud":16054,"reflections":16055,"kindness":16056,"1787":16057,"internally":16058,"##ovo":16059,"tex":16060,"mosaic":16061,"distribute":16062,"paddy":16063,"seeming":16064,"143":16065,"##hic":16066,"piers":16067,"##15":16068,"##mura":16069,"##verse":16070,"popularly":16071,"winger":16072,"kang":16073,"sentinel":16074,"mccoy":16075,"##anza":16076,"covenant":16077,"##bag":16078,"verge":16079,"fireworks":16080,"suppress":16081,"thrilled":16082,"dominate":16083,"##jar":16084,"swansea":16085,"##60":16086,"142":16087,"reconciliation":16088,"##ndi":16089,"stiffened":16090,"cue":16091,"dorian":16092,"##uf":16093,"damascus":16094,"amor":16095,"ida":16096,"foremost":16097,"##aga":16098,"porsche":16099,"unseen":16100,"dir":16101,"##had":16102,"##azi":16103,"stony":16104,"lexi":16105,"melodies":16106,"##nko":16107,"angular":16108,"integer":16109,"podcast":16110,"ants":16111,"inherent":16112,"jaws":16113,"justify":16114,"persona":16115,"##olved":16116,"josephine":16117,"##nr":16118,"##ressed":16119,"customary":16120,"flashes":16121,"gala":16122,"cyrus":16123,"glaring":16124,"backyard":16125,"ariel":16126,"physiology":16127,"greenland":16128,"html":16129,"stir":16130,"avon":16131,"atletico":16132,"finch":16133,"methodology":16134,"ked":16135,"##lent":16136,"mas":16137,"catholicism":16138,"townsend":16139,"branding":16140,"quincy":16141,"fits":16142,"containers":16143,"1777":16144,"ashore":16145,"aragon":16146,"##19":16147,"forearm":16148,"poisoning":16149,"##sd":16150,"adopting":16151,"conquer":16152,"grinding":16153,"amnesty":16154,"keller":16155,"finances":16156,"evaluate":16157,"forged":16158,"lankan":16159,"instincts":16160,"##uto":16161,"guam":16162,"bosnian":16163,"photographed":16164,"workplace":16165,"desirable":16166,"protector":16167,"##dog":16168,"allocation":16169,"intently":16170,"encourages":16171,"willy":16172,"##sten":16173,"bodyguard":16174,"electro":16175,"brighter":16176,"##ν":16177,"bihar":16178,"##chev":16179,"lasts":16180,"opener":16181,"amphibious":16182,"sal":16183,"verde":16184,"arte":16185,"##cope":16186,"captivity":16187,"vocabulary":16188,"yields":16189,"##tted":16190,"agreeing":16191,"desmond":16192,"pioneered":16193,"##chus":16194,"strap":16195,"campaigned":16196,"railroads":16197,"##ович":16198,"emblem":16199,"##dre":16200,"stormed":16201,"501":16202,"##ulous":16203,"marijuana":16204,"northumberland":16205,"##gn":16206,"##nath":16207,"bowen":16208,"landmarks":16209,"beaumont":16210,"##qua":16211,"danube":16212,"##bler":16213,"attorneys":16214,"th":16215,"ge":16216,"flyers":16217,"critique":16218,"villains":16219,"cass":16220,"mutation":16221,"acc":16222,"##0s":16223,"colombo":16224,"mckay":16225,"motif":16226,"sampling":16227,"concluding":16228,"syndicate":16229,"##rell":16230,"neon":16231,"stables":16232,"ds":16233,"warnings":16234,"clint":16235,"mourning":16236,"wilkinson":16
237,"##tated":16238,"merrill":16239,"leopard":16240,"evenings":16241,"exhaled":16242,"emil":16243,"sonia":16244,"ezra":16245,"discrete":16246,"stove":16247,"farrell":16248,"fifteenth":16249,"prescribed":16250,"superhero":16251,"##rier":16252,"worms":16253,"helm":16254,"wren":16255,"##duction":16256,"##hc":16257,"expo":16258,"##rator":16259,"hq":16260,"unfamiliar":16261,"antony":16262,"prevents":16263,"acceleration":16264,"fiercely":16265,"mari":16266,"painfully":16267,"calculations":16268,"cheaper":16269,"ign":16270,"clifton":16271,"irvine":16272,"davenport":16273,"mozambique":16274,"##np":16275,"pierced":16276,"##evich":16277,"wonders":16278,"##wig":16279,"##cate":16280,"##iling":16281,"crusade":16282,"ware":16283,"##uel":16284,"enzymes":16285,"reasonably":16286,"mls":16287,"##coe":16288,"mater":16289,"ambition":16290,"bunny":16291,"eliot":16292,"kernel":16293,"##fin":16294,"asphalt":16295,"headmaster":16296,"torah":16297,"aden":16298,"lush":16299,"pins":16300,"waived":16301,"##care":16302,"##yas":16303,"joao":16304,"substrate":16305,"enforce":16306,"##grad":16307,"##ules":16308,"alvarez":16309,"selections":16310,"epidemic":16311,"tempted":16312,"##bit":16313,"bremen":16314,"translates":16315,"ensured":16316,"waterfront":16317,"29th":16318,"forrest":16319,"manny":16320,"malone":16321,"kramer":16322,"reigning":16323,"cookies":16324,"simpler":16325,"absorption":16326,"205":16327,"engraved":16328,"##ffy":16329,"evaluated":16330,"1778":16331,"haze":16332,"146":16333,"comforting":16334,"crossover":16335,"##abe":16336,"thorn":16337,"##rift":16338,"##imo":16339,"##pop":16340,"suppression":16341,"fatigue":16342,"cutter":16343,"##tr":16344,"201":16345,"wurttemberg":16346,"##orf":16347,"enforced":16348,"hovering":16349,"proprietary":16350,"gb":16351,"samurai":16352,"syllable":16353,"ascent":16354,"lacey":16355,"tick":16356,"lars":16357,"tractor":16358,"merchandise":16359,"rep":16360,"bouncing":16361,"defendants":16362,"##yre":16363,"huntington":16364,"##ground":16365,"##oko":16366,"standardized":16367,"##hor":16368,"##hima":16369,"assassinated":16370,"nu":16371,"predecessors":16372,"rainy":16373,"liar":16374,"assurance":16375,"lyrical":16376,"##uga":16377,"secondly":16378,"flattened":16379,"ios":16380,"parameter":16381,"undercover":16382,"##mity":16383,"bordeaux":16384,"punish":16385,"ridges":16386,"markers":16387,"exodus":16388,"inactive":16389,"hesitate":16390,"debbie":16391,"nyc":16392,"pledge":16393,"savoy":16394,"nagar":16395,"offset":16396,"organist":16397,"##tium":16398,"hesse":16399,"marin":16400,"converting":16401,"##iver":16402,"diagram":16403,"propulsion":16404,"pu":16405,"validity":16406,"reverted":16407,"supportive":16408,"##dc":16409,"ministries":16410,"clans":16411,"responds":16412,"proclamation":16413,"##inae":16414,"##ø":16415,"##rea":16416,"ein":16417,"pleading":16418,"patriot":16419,"sf":16420,"birch":16421,"islanders":16422,"strauss":16423,"hates":16424,"##dh":16425,"brandenburg":16426,"concession":16427,"rd":16428,"##ob":16429,"1900s":16430,"killings":16431,"textbook":16432,"antiquity":16433,"cinematography":16434,"wharf":16435,"embarrassing":16436,"setup":16437,"creed":16438,"farmland":16439,"inequality":16440,"centred":16441,"signatures":16442,"fallon":16443,"370":16444,"##ingham":16445,"##uts":16446,"ceylon":16447,"gazing":16448,"directive":16449,"laurie":16450,"##tern":16451,"globally":16452,"##uated":16453,"##dent":16454,"allah":16455,"excavation":16456,"threads":16457,"##cross":16458,"148":16459,"frantically":16460,"icc":16461,"utilize":16462,"determines":16463,"respirator
y":16464,"thoughtful":16465,"receptions":16466,"##dicate":16467,"merging":16468,"chandra":16469,"seine":16470,"147":16471,"builders":16472,"builds":16473,"diagnostic":16474,"dev":16475,"visibility":16476,"goddamn":16477,"analyses":16478,"dhaka":16479,"cho":16480,"proves":16481,"chancel":16482,"concurrent":16483,"curiously":16484,"canadians":16485,"pumped":16486,"restoring":16487,"1850s":16488,"turtles":16489,"jaguar":16490,"sinister":16491,"spinal":16492,"traction":16493,"declan":16494,"vows":16495,"1784":16496,"glowed":16497,"capitalism":16498,"swirling":16499,"install":16500,"universidad":16501,"##lder":16502,"##oat":16503,"soloist":16504,"##genic":16505,"##oor":16506,"coincidence":16507,"beginnings":16508,"nissan":16509,"dip":16510,"resorts":16511,"caucasus":16512,"combustion":16513,"infectious":16514,"##eno":16515,"pigeon":16516,"serpent":16517,"##itating":16518,"conclude":16519,"masked":16520,"salad":16521,"jew":16522,"##gr":16523,"surreal":16524,"toni":16525,"##wc":16526,"harmonica":16527,"151":16528,"##gins":16529,"##etic":16530,"##coat":16531,"fishermen":16532,"intending":16533,"bravery":16534,"##wave":16535,"klaus":16536,"titan":16537,"wembley":16538,"taiwanese":16539,"ransom":16540,"40th":16541,"incorrect":16542,"hussein":16543,"eyelids":16544,"jp":16545,"cooke":16546,"dramas":16547,"utilities":16548,"##etta":16549,"##print":16550,"eisenhower":16551,"principally":16552,"granada":16553,"lana":16554,"##rak":16555,"openings":16556,"concord":16557,"##bl":16558,"bethany":16559,"connie":16560,"morality":16561,"sega":16562,"##mons":16563,"##nard":16564,"earnings":16565,"##kara":16566,"##cine":16567,"wii":16568,"communes":16569,"##rel":16570,"coma":16571,"composing":16572,"softened":16573,"severed":16574,"grapes":16575,"##17":16576,"nguyen":16577,"analyzed":16578,"warlord":16579,"hubbard":16580,"heavenly":16581,"behave":16582,"slovenian":16583,"##hit":16584,"##ony":16585,"hailed":16586,"filmmakers":16587,"trance":16588,"caldwell":16589,"skye":16590,"unrest":16591,"coward":16592,"likelihood":16593,"##aging":16594,"bern":16595,"sci":16596,"taliban":16597,"honolulu":16598,"propose":16599,"##wang":16600,"1700":16601,"browser":16602,"imagining":16603,"cobra":16604,"contributes":16605,"dukes":16606,"instinctively":16607,"conan":16608,"violinist":16609,"##ores":16610,"accessories":16611,"gradual":16612,"##amp":16613,"quotes":16614,"sioux":16615,"##dating":16616,"undertake":16617,"intercepted":16618,"sparkling":16619,"compressed":16620,"139":16621,"fungus":16622,"tombs":16623,"haley":16624,"imposing":16625,"rests":16626,"degradation":16627,"lincolnshire":16628,"retailers":16629,"wetlands":16630,"tulsa":16631,"distributor":16632,"dungeon":16633,"nun":16634,"greenhouse":16635,"convey":16636,"atlantis":16637,"aft":16638,"exits":16639,"oman":16640,"dresser":16641,"lyons":16642,"##sti":16643,"joking":16644,"eddy":16645,"judgement":16646,"omitted":16647,"digits":16648,"##cts":16649,"##game":16650,"juniors":16651,"##rae":16652,"cents":16653,"stricken":16654,"une":16655,"##ngo":16656,"wizards":16657,"weir":16658,"breton":16659,"nan":16660,"technician":16661,"fibers":16662,"liking":16663,"royalty":16664,"##cca":16665,"154":16666,"persia":16667,"terribly":16668,"magician":16669,"##rable":16670,"##unt":16671,"vance":16672,"cafeteria":16673,"booker":16674,"camille":16675,"warmer":16676,"##static":16677,"consume":16678,"cavern":16679,"gaps":16680,"compass":16681,"contemporaries":16682,"foyer":16683,"soothing":16684,"graveyard":16685,"maj":16686,"plunged":16687,"blush":16688,"##wear":16689,"cascade":16690,"dem
onstrates":16691,"ordinance":16692,"##nov":16693,"boyle":16694,"##lana":16695,"rockefeller":16696,"shaken":16697,"banjo":16698,"izzy":16699,"##ense":16700,"breathless":16701,"vines":16702,"##32":16703,"##eman":16704,"alterations":16705,"chromosome":16706,"dwellings":16707,"feudal":16708,"mole":16709,"153":16710,"catalonia":16711,"relics":16712,"tenant":16713,"mandated":16714,"##fm":16715,"fridge":16716,"hats":16717,"honesty":16718,"patented":16719,"raul":16720,"heap":16721,"cruisers":16722,"accusing":16723,"enlightenment":16724,"infants":16725,"wherein":16726,"chatham":16727,"contractors":16728,"zen":16729,"affinity":16730,"hc":16731,"osborne":16732,"piston":16733,"156":16734,"traps":16735,"maturity":16736,"##rana":16737,"lagos":16738,"##zal":16739,"peering":16740,"##nay":16741,"attendant":16742,"dealers":16743,"protocols":16744,"subset":16745,"prospects":16746,"biographical":16747,"##cre":16748,"artery":16749,"##zers":16750,"insignia":16751,"nuns":16752,"endured":16753,"##eration":16754,"recommend":16755,"schwartz":16756,"serbs":16757,"berger":16758,"cromwell":16759,"crossroads":16760,"##ctor":16761,"enduring":16762,"clasped":16763,"grounded":16764,"##bine":16765,"marseille":16766,"twitched":16767,"abel":16768,"choke":16769,"https":16770,"catalyst":16771,"moldova":16772,"italians":16773,"##tist":16774,"disastrous":16775,"wee":16776,"##oured":16777,"##nti":16778,"wwf":16779,"nope":16780,"##piration":16781,"##asa":16782,"expresses":16783,"thumbs":16784,"167":16785,"##nza":16786,"coca":16787,"1781":16788,"cheating":16789,"##ption":16790,"skipped":16791,"sensory":16792,"heidelberg":16793,"spies":16794,"satan":16795,"dangers":16796,"semifinal":16797,"202":16798,"bohemia":16799,"whitish":16800,"confusing":16801,"shipbuilding":16802,"relies":16803,"surgeons":16804,"landings":16805,"ravi":16806,"baku":16807,"moor":16808,"suffix":16809,"alejandro":16810,"##yana":16811,"litre":16812,"upheld":16813,"##unk":16814,"rajasthan":16815,"##rek":16816,"coaster":16817,"insists":16818,"posture":16819,"scenarios":16820,"etienne":16821,"favoured":16822,"appoint":16823,"transgender":16824,"elephants":16825,"poked":16826,"greenwood":16827,"defences":16828,"fulfilled":16829,"militant":16830,"somali":16831,"1758":16832,"chalk":16833,"potent":16834,"##ucci":16835,"migrants":16836,"wink":16837,"assistants":16838,"nos":16839,"restriction":16840,"activism":16841,"niger":16842,"##ario":16843,"colon":16844,"shaun":16845,"##sat":16846,"daphne":16847,"##erated":16848,"swam":16849,"congregations":16850,"reprise":16851,"considerations":16852,"magnet":16853,"playable":16854,"xvi":16855,"##р":16856,"overthrow":16857,"tobias":16858,"knob":16859,"chavez":16860,"coding":16861,"##mers":16862,"propped":16863,"katrina":16864,"orient":16865,"newcomer":16866,"##suke":16867,"temperate":16868,"##pool":16869,"farmhouse":16870,"interrogation":16871,"##vd":16872,"committing":16873,"##vert":16874,"forthcoming":16875,"strawberry":16876,"joaquin":16877,"macau":16878,"ponds":16879,"shocking":16880,"siberia":16881,"##cellular":16882,"chant":16883,"contributors":16884,"##nant":16885,"##ologists":16886,"sped":16887,"absorb":16888,"hail":16889,"1782":16890,"spared":16891,"##hore":16892,"barbados":16893,"karate":16894,"opus":16895,"originates":16896,"saul":16897,"##xie":16898,"evergreen":16899,"leaped":16900,"##rock":16901,"correlation":16902,"exaggerated":16903,"weekday":16904,"unification":16905,"bump":16906,"tracing":16907,"brig":16908,"afb":16909,"pathways":16910,"utilizing":16911,"##ners":16912,"mod":16913,"mb":16914,"disturbance":16915,"kneeli
ng":16916,"##stad":16917,"##guchi":16918,"100th":16919,"pune":16920,"##thy":16921,"decreasing":16922,"168":16923,"manipulation":16924,"miriam":16925,"academia":16926,"ecosystem":16927,"occupational":16928,"rbi":16929,"##lem":16930,"rift":16931,"##14":16932,"rotary":16933,"stacked":16934,"incorporation":16935,"awakening":16936,"generators":16937,"guerrero":16938,"racist":16939,"##omy":16940,"cyber":16941,"derivatives":16942,"culminated":16943,"allie":16944,"annals":16945,"panzer":16946,"sainte":16947,"wikipedia":16948,"pops":16949,"zu":16950,"austro":16951,"##vate":16952,"algerian":16953,"politely":16954,"nicholson":16955,"mornings":16956,"educate":16957,"tastes":16958,"thrill":16959,"dartmouth":16960,"##gating":16961,"db":16962,"##jee":16963,"regan":16964,"differing":16965,"concentrating":16966,"choreography":16967,"divinity":16968,"##media":16969,"pledged":16970,"alexandre":16971,"routing":16972,"gregor":16973,"madeline":16974,"##idal":16975,"apocalypse":16976,"##hora":16977,"gunfire":16978,"culminating":16979,"elves":16980,"fined":16981,"liang":16982,"lam":16983,"programmed":16984,"tar":16985,"guessing":16986,"transparency":16987,"gabrielle":16988,"##gna":16989,"cancellation":16990,"flexibility":16991,"##lining":16992,"accession":16993,"shea":16994,"stronghold":16995,"nets":16996,"specializes":16997,"##rgan":16998,"abused":16999,"hasan":17000,"sgt":17001,"ling":17002,"exceeding":17003,"##₄":17004,"admiration":17005,"supermarket":17006,"##ark":17007,"photographers":17008,"specialised":17009,"tilt":17010,"resonance":17011,"hmm":17012,"perfume":17013,"380":17014,"sami":17015,"threatens":17016,"garland":17017,"botany":17018,"guarding":17019,"boiled":17020,"greet":17021,"puppy":17022,"russo":17023,"supplier":17024,"wilmington":17025,"vibrant":17026,"vijay":17027,"##bius":17028,"paralympic":17029,"grumbled":17030,"paige":17031,"faa":17032,"licking":17033,"margins":17034,"hurricanes":17035,"##gong":17036,"fest":17037,"grenade":17038,"ripping":17039,"##uz":17040,"counseling":17041,"weigh":17042,"##sian":17043,"needles":17044,"wiltshire":17045,"edison":17046,"costly":17047,"##not":17048,"fulton":17049,"tramway":17050,"redesigned":17051,"staffordshire":17052,"cache":17053,"gasping":17054,"watkins":17055,"sleepy":17056,"candidacy":17057,"##group":17058,"monkeys":17059,"timeline":17060,"throbbing":17061,"##bid":17062,"##sos":17063,"berth":17064,"uzbekistan":17065,"vanderbilt":17066,"bothering":17067,"overturned":17068,"ballots":17069,"gem":17070,"##iger":17071,"sunglasses":17072,"subscribers":17073,"hooker":17074,"compelling":17075,"ang":17076,"exceptionally":17077,"saloon":17078,"stab":17079,"##rdi":17080,"carla":17081,"terrifying":17082,"rom":17083,"##vision":17084,"coil":17085,"##oids":17086,"satisfying":17087,"vendors":17088,"31st":17089,"mackay":17090,"deities":17091,"overlooked":17092,"ambient":17093,"bahamas":17094,"felipe":17095,"olympia":17096,"whirled":17097,"botanist":17098,"advertised":17099,"tugging":17100,"##dden":17101,"disciples":17102,"morales":17103,"unionist":17104,"rites":17105,"foley":17106,"morse":17107,"motives":17108,"creepy":17109,"##₀":17110,"soo":17111,"##sz":17112,"bargain":17113,"highness":17114,"frightening":17115,"turnpike":17116,"tory":17117,"reorganization":17118,"##cer":17119,"depict":17120,"biographer":17121,"##walk":17122,"unopposed":17123,"manifesto":17124,"##gles":17125,"institut":17126,"emile":17127,"accidental":17128,"kapoor":17129,"##dam":17130,"kilkenny":17131,"cortex":17132,"lively":17133,"##13":17134,"romanesque":17135,"jain":17136,"shan":17137,"cannons":17
138,"##ood":17139,"##ske":17140,"petrol":17141,"echoing":17142,"amalgamated":17143,"disappears":17144,"cautious":17145,"proposes":17146,"sanctions":17147,"trenton":17148,"##ر":17149,"flotilla":17150,"aus":17151,"contempt":17152,"tor":17153,"canary":17154,"cote":17155,"theirs":17156,"##hun":17157,"conceptual":17158,"deleted":17159,"fascinating":17160,"paso":17161,"blazing":17162,"elf":17163,"honourable":17164,"hutchinson":17165,"##eiro":17166,"##outh":17167,"##zin":17168,"surveyor":17169,"tee":17170,"amidst":17171,"wooded":17172,"reissue":17173,"intro":17174,"##ono":17175,"cobb":17176,"shelters":17177,"newsletter":17178,"hanson":17179,"brace":17180,"encoding":17181,"confiscated":17182,"dem":17183,"caravan":17184,"marino":17185,"scroll":17186,"melodic":17187,"cows":17188,"imam":17189,"##adi":17190,"##aneous":17191,"northward":17192,"searches":17193,"biodiversity":17194,"cora":17195,"310":17196,"roaring":17197,"##bers":17198,"connell":17199,"theologian":17200,"halo":17201,"compose":17202,"pathetic":17203,"unmarried":17204,"dynamo":17205,"##oot":17206,"az":17207,"calculation":17208,"toulouse":17209,"deserves":17210,"humour":17211,"nr":17212,"forgiveness":17213,"tam":17214,"undergone":17215,"martyr":17216,"pamela":17217,"myths":17218,"whore":17219,"counselor":17220,"hicks":17221,"290":17222,"heavens":17223,"battleship":17224,"electromagnetic":17225,"##bbs":17226,"stellar":17227,"establishments":17228,"presley":17229,"hopped":17230,"##chin":17231,"temptation":17232,"90s":17233,"wills":17234,"nas":17235,"##yuan":17236,"nhs":17237,"##nya":17238,"seminars":17239,"##yev":17240,"adaptations":17241,"gong":17242,"asher":17243,"lex":17244,"indicator":17245,"sikh":17246,"tobago":17247,"cites":17248,"goin":17249,"##yte":17250,"satirical":17251,"##gies":17252,"characterised":17253,"correspond":17254,"bubbles":17255,"lure":17256,"participates":17257,"##vid":17258,"eruption":17259,"skate":17260,"therapeutic":17261,"1785":17262,"canals":17263,"wholesale":17264,"defaulted":17265,"sac":17266,"460":17267,"petit":17268,"##zzled":17269,"virgil":17270,"leak":17271,"ravens":17272,"256":17273,"portraying":17274,"##yx":17275,"ghetto":17276,"creators":17277,"dams":17278,"portray":17279,"vicente":17280,"##rington":17281,"fae":17282,"namesake":17283,"bounty":17284,"##arium":17285,"joachim":17286,"##ota":17287,"##iser":17288,"aforementioned":17289,"axle":17290,"snout":17291,"depended":17292,"dismantled":17293,"reuben":17294,"480":17295,"##ibly":17296,"gallagher":17297,"##lau":17298,"##pd":17299,"earnest":17300,"##ieu":17301,"##iary":17302,"inflicted":17303,"objections":17304,"##llar":17305,"asa":17306,"gritted":17307,"##athy":17308,"jericho":17309,"##sea":17310,"##was":17311,"flick":17312,"underside":17313,"ceramics":17314,"undead":17315,"substituted":17316,"195":17317,"eastward":17318,"undoubtedly":17319,"wheeled":17320,"chimney":17321,"##iche":17322,"guinness":17323,"cb":17324,"##ager":17325,"siding":17326,"##bell":17327,"traitor":17328,"baptiste":17329,"disguised":17330,"inauguration":17331,"149":17332,"tipperary":17333,"choreographer":17334,"perched":17335,"warmed":17336,"stationary":17337,"eco":17338,"##ike":17339,"##ntes":17340,"bacterial":17341,"##aurus":17342,"flores":17343,"phosphate":17344,"##core":17345,"attacker":17346,"invaders":17347,"alvin":17348,"intersects":17349,"a1":17350,"indirectly":17351,"immigrated":17352,"businessmen":17353,"cornelius":17354,"valves":17355,"narrated":17356,"pill":17357,"sober":17358,"ul":17359,"nationale":17360,"monastic":17361,"applicants":17362,"scenery":17363,"##jack":17364,"161"
:17365,"motifs":17366,"constitutes":17367,"cpu":17368,"##osh":17369,"jurisdictions":17370,"sd":17371,"tuning":17372,"irritation":17373,"woven":17374,"##uddin":17375,"fertility":17376,"gao":17377,"##erie":17378,"antagonist":17379,"impatient":17380,"glacial":17381,"hides":17382,"boarded":17383,"denominations":17384,"interception":17385,"##jas":17386,"cookie":17387,"nicola":17388,"##tee":17389,"algebraic":17390,"marquess":17391,"bahn":17392,"parole":17393,"buyers":17394,"bait":17395,"turbines":17396,"paperwork":17397,"bestowed":17398,"natasha":17399,"renee":17400,"oceans":17401,"purchases":17402,"157":17403,"vaccine":17404,"215":17405,"##tock":17406,"fixtures":17407,"playhouse":17408,"integrate":17409,"jai":17410,"oswald":17411,"intellectuals":17412,"##cky":17413,"booked":17414,"nests":17415,"mortimer":17416,"##isi":17417,"obsession":17418,"sept":17419,"##gler":17420,"##sum":17421,"440":17422,"scrutiny":17423,"simultaneous":17424,"squinted":17425,"##shin":17426,"collects":17427,"oven":17428,"shankar":17429,"penned":17430,"remarkably":17431,"##я":17432,"slips":17433,"luggage":17434,"spectral":17435,"1786":17436,"collaborations":17437,"louie":17438,"consolidation":17439,"##ailed":17440,"##ivating":17441,"420":17442,"hoover":17443,"blackpool":17444,"harness":17445,"ignition":17446,"vest":17447,"tails":17448,"belmont":17449,"mongol":17450,"skinner":17451,"##nae":17452,"visually":17453,"mage":17454,"derry":17455,"##tism":17456,"##unce":17457,"stevie":17458,"transitional":17459,"##rdy":17460,"redskins":17461,"drying":17462,"prep":17463,"prospective":17464,"##21":17465,"annoyance":17466,"oversee":17467,"##loaded":17468,"fills":17469,"##books":17470,"##iki":17471,"announces":17472,"fda":17473,"scowled":17474,"respects":17475,"prasad":17476,"mystic":17477,"tucson":17478,"##vale":17479,"revue":17480,"springer":17481,"bankrupt":17482,"1772":17483,"aristotle":17484,"salvatore":17485,"habsburg":17486,"##geny":17487,"dal":17488,"natal":17489,"nut":17490,"pod":17491,"chewing":17492,"darts":17493,"moroccan":17494,"walkover":17495,"rosario":17496,"lenin":17497,"punjabi":17498,"##ße":17499,"grossed":17500,"scattering":17501,"wired":17502,"invasive":17503,"hui":17504,"polynomial":17505,"corridors":17506,"wakes":17507,"gina":17508,"portrays":17509,"##cratic":17510,"arid":17511,"retreating":17512,"erich":17513,"irwin":17514,"sniper":17515,"##dha":17516,"linen":17517,"lindsey":17518,"maneuver":17519,"butch":17520,"shutting":17521,"socio":17522,"bounce":17523,"commemorative":17524,"postseason":17525,"jeremiah":17526,"pines":17527,"275":17528,"mystical":17529,"beads":17530,"bp":17531,"abbas":17532,"furnace":17533,"bidding":17534,"consulted":17535,"assaulted":17536,"empirical":17537,"rubble":17538,"enclosure":17539,"sob":17540,"weakly":17541,"cancel":17542,"polly":17543,"yielded":17544,"##emann":17545,"curly":17546,"prediction":17547,"battered":17548,"70s":17549,"vhs":17550,"jacqueline":17551,"render":17552,"sails":17553,"barked":17554,"detailing":17555,"grayson":17556,"riga":17557,"sloane":17558,"raging":17559,"##yah":17560,"herbs":17561,"bravo":17562,"##athlon":17563,"alloy":17564,"giggle":17565,"imminent":17566,"suffers":17567,"assumptions":17568,"waltz":17569,"##itate":17570,"accomplishments":17571,"##ited":17572,"bathing":17573,"remixed":17574,"deception":17575,"prefix":17576,"##emia":17577,"deepest":17578,"##tier":17579,"##eis":17580,"balkan":17581,"frogs":17582,"##rong":17583,"slab":17584,"##pate":17585,"philosophers":17586,"peterborough":17587,"grains":17588,"imports":17589,"dickinson":17590,"rwanda":17591,"##
atics":17592,"1774":17593,"dirk":17594,"lan":17595,"tablets":17596,"##rove":17597,"clone":17598,"##rice":17599,"caretaker":17600,"hostilities":17601,"mclean":17602,"##gre":17603,"regimental":17604,"treasures":17605,"norms":17606,"impose":17607,"tsar":17608,"tango":17609,"diplomacy":17610,"variously":17611,"complain":17612,"192":17613,"recognise":17614,"arrests":17615,"1779":17616,"celestial":17617,"pulitzer":17618,"##dus":17619,"bing":17620,"libretto":17621,"##moor":17622,"adele":17623,"splash":17624,"##rite":17625,"expectation":17626,"lds":17627,"confronts":17628,"##izer":17629,"spontaneous":17630,"harmful":17631,"wedge":17632,"entrepreneurs":17633,"buyer":17634,"##ope":17635,"bilingual":17636,"translate":17637,"rugged":17638,"conner":17639,"circulated":17640,"uae":17641,"eaton":17642,"##gra":17643,"##zzle":17644,"lingered":17645,"lockheed":17646,"vishnu":17647,"reelection":17648,"alonso":17649,"##oom":17650,"joints":17651,"yankee":17652,"headline":17653,"cooperate":17654,"heinz":17655,"laureate":17656,"invading":17657,"##sford":17658,"echoes":17659,"scandinavian":17660,"##dham":17661,"hugging":17662,"vitamin":17663,"salute":17664,"micah":17665,"hind":17666,"trader":17667,"##sper":17668,"radioactive":17669,"##ndra":17670,"militants":17671,"poisoned":17672,"ratified":17673,"remark":17674,"campeonato":17675,"deprived":17676,"wander":17677,"prop":17678,"##dong":17679,"outlook":17680,"##tani":17681,"##rix":17682,"##eye":17683,"chiang":17684,"darcy":17685,"##oping":17686,"mandolin":17687,"spice":17688,"statesman":17689,"babylon":17690,"182":17691,"walled":17692,"forgetting":17693,"afro":17694,"##cap":17695,"158":17696,"giorgio":17697,"buffer":17698,"##polis":17699,"planetary":17700,"##gis":17701,"overlap":17702,"terminals":17703,"kinda":17704,"centenary":17705,"##bir":17706,"arising":17707,"manipulate":17708,"elm":17709,"ke":17710,"1770":17711,"ak":17712,"##tad":17713,"chrysler":17714,"mapped":17715,"moose":17716,"pomeranian":17717,"quad":17718,"macarthur":17719,"assemblies":17720,"shoreline":17721,"recalls":17722,"stratford":17723,"##rted":17724,"noticeable":17725,"##evic":17726,"imp":17727,"##rita":17728,"##sque":17729,"accustomed":17730,"supplying":17731,"tents":17732,"disgusted":17733,"vogue":17734,"sipped":17735,"filters":17736,"khz":17737,"reno":17738,"selecting":17739,"luftwaffe":17740,"mcmahon":17741,"tyne":17742,"masterpiece":17743,"carriages":17744,"collided":17745,"dunes":17746,"exercised":17747,"flare":17748,"remembers":17749,"muzzle":17750,"##mobile":17751,"heck":17752,"##rson":17753,"burgess":17754,"lunged":17755,"middleton":17756,"boycott":17757,"bilateral":17758,"##sity":17759,"hazardous":17760,"lumpur":17761,"multiplayer":17762,"spotlight":17763,"jackets":17764,"goldman":17765,"liege":17766,"porcelain":17767,"rag":17768,"waterford":17769,"benz":17770,"attracts":17771,"hopeful":17772,"battling":17773,"ottomans":17774,"kensington":17775,"baked":17776,"hymns":17777,"cheyenne":17778,"lattice":17779,"levine":17780,"borrow":17781,"polymer":17782,"clashes":17783,"michaels":17784,"monitored":17785,"commitments":17786,"denounced":17787,"##25":17788,"##von":17789,"cavity":17790,"##oney":17791,"hobby":17792,"akin":17793,"##holders":17794,"futures":17795,"intricate":17796,"cornish":17797,"patty":17798,"##oned":17799,"illegally":17800,"dolphin":17801,"##lag":17802,"barlow":17803,"yellowish":17804,"maddie":17805,"apologized":17806,"luton":17807,"plagued":17808,"##puram":17809,"nana":17810,"##rds":17811,"sway":17812,"fanny":17813,"łodz":17814,"##rino":17815,"psi":17816,"suspicions":17817,"han
ged":17818,"##eding":17819,"initiate":17820,"charlton":17821,"##por":17822,"nak":17823,"competent":17824,"235":17825,"analytical":17826,"annex":17827,"wardrobe":17828,"reservations":17829,"##rma":17830,"sect":17831,"162":17832,"fairfax":17833,"hedge":17834,"piled":17835,"buckingham":17836,"uneven":17837,"bauer":17838,"simplicity":17839,"snyder":17840,"interpret":17841,"accountability":17842,"donors":17843,"moderately":17844,"byrd":17845,"continents":17846,"##cite":17847,"##max":17848,"disciple":17849,"hr":17850,"jamaican":17851,"ping":17852,"nominees":17853,"##uss":17854,"mongolian":17855,"diver":17856,"attackers":17857,"eagerly":17858,"ideological":17859,"pillows":17860,"miracles":17861,"apartheid":17862,"revolver":17863,"sulfur":17864,"clinics":17865,"moran":17866,"163":17867,"##enko":17868,"ile":17869,"katy":17870,"rhetoric":17871,"##icated":17872,"chronology":17873,"recycling":17874,"##hrer":17875,"elongated":17876,"mughal":17877,"pascal":17878,"profiles":17879,"vibration":17880,"databases":17881,"domination":17882,"##fare":17883,"##rant":17884,"matthias":17885,"digest":17886,"rehearsal":17887,"polling":17888,"weiss":17889,"initiation":17890,"reeves":17891,"clinging":17892,"flourished":17893,"impress":17894,"ngo":17895,"##hoff":17896,"##ume":17897,"buckley":17898,"symposium":17899,"rhythms":17900,"weed":17901,"emphasize":17902,"transforming":17903,"##taking":17904,"##gence":17905,"##yman":17906,"accountant":17907,"analyze":17908,"flicker":17909,"foil":17910,"priesthood":17911,"voluntarily":17912,"decreases":17913,"##80":17914,"##hya":17915,"slater":17916,"sv":17917,"charting":17918,"mcgill":17919,"##lde":17920,"moreno":17921,"##iu":17922,"besieged":17923,"zur":17924,"robes":17925,"##phic":17926,"admitting":17927,"api":17928,"deported":17929,"turmoil":17930,"peyton":17931,"earthquakes":17932,"##ares":17933,"nationalists":17934,"beau":17935,"clair":17936,"brethren":17937,"interrupt":17938,"welch":17939,"curated":17940,"galerie":17941,"requesting":17942,"164":17943,"##ested":17944,"impending":17945,"steward":17946,"viper":17947,"##vina":17948,"complaining":17949,"beautifully":17950,"brandy":17951,"foam":17952,"nl":17953,"1660":17954,"##cake":17955,"alessandro":17956,"punches":17957,"laced":17958,"explanations":17959,"##lim":17960,"attribute":17961,"clit":17962,"reggie":17963,"discomfort":17964,"##cards":17965,"smoothed":17966,"whales":17967,"##cene":17968,"adler":17969,"countered":17970,"duffy":17971,"disciplinary":17972,"widening":17973,"recipe":17974,"reliance":17975,"conducts":17976,"goats":17977,"gradient":17978,"preaching":17979,"##shaw":17980,"matilda":17981,"quasi":17982,"striped":17983,"meridian":17984,"cannabis":17985,"cordoba":17986,"certificates":17987,"##agh":17988,"##tering":17989,"graffiti":17990,"hangs":17991,"pilgrims":17992,"repeats":17993,"##ych":17994,"revive":17995,"urine":17996,"etat":17997,"##hawk":17998,"fueled":17999,"belts":18000,"fuzzy":18001,"susceptible":18002,"##hang":18003,"mauritius":18004,"salle":18005,"sincere":18006,"beers":18007,"hooks":18008,"##cki":18009,"arbitration":18010,"entrusted":18011,"advise":18012,"sniffed":18013,"seminar":18014,"junk":18015,"donnell":18016,"processors":18017,"principality":18018,"strapped":18019,"celia":18020,"mendoza":18021,"everton":18022,"fortunes":18023,"prejudice":18024,"starving":18025,"reassigned":18026,"steamer":18027,"##lund":18028,"tuck":18029,"evenly":18030,"foreman":18031,"##ffen":18032,"dans":18033,"375":18034,"envisioned":18035,"slit":18036,"##xy":18037,"baseman":18038,"liberia":18039,"rosemary":18040,"##weed":18
041,"electrified":18042,"periodically":18043,"potassium":18044,"stride":18045,"contexts":18046,"sperm":18047,"slade":18048,"mariners":18049,"influx":18050,"bianca":18051,"subcommittee":18052,"##rane":18053,"spilling":18054,"icao":18055,"estuary":18056,"##nock":18057,"delivers":18058,"iphone":18059,"##ulata":18060,"isa":18061,"mira":18062,"bohemian":18063,"dessert":18064,"##sbury":18065,"welcoming":18066,"proudly":18067,"slowing":18068,"##chs":18069,"musee":18070,"ascension":18071,"russ":18072,"##vian":18073,"waits":18074,"##psy":18075,"africans":18076,"exploit":18077,"##morphic":18078,"gov":18079,"eccentric":18080,"crab":18081,"peck":18082,"##ull":18083,"entrances":18084,"formidable":18085,"marketplace":18086,"groom":18087,"bolted":18088,"metabolism":18089,"patton":18090,"robbins":18091,"courier":18092,"payload":18093,"endure":18094,"##ifier":18095,"andes":18096,"refrigerator":18097,"##pr":18098,"ornate":18099,"##uca":18100,"ruthless":18101,"illegitimate":18102,"masonry":18103,"strasbourg":18104,"bikes":18105,"adobe":18106,"##³":18107,"apples":18108,"quintet":18109,"willingly":18110,"niche":18111,"bakery":18112,"corpses":18113,"energetic":18114,"##cliffe":18115,"##sser":18116,"##ards":18117,"177":18118,"centimeters":18119,"centro":18120,"fuscous":18121,"cretaceous":18122,"rancho":18123,"##yde":18124,"andrei":18125,"telecom":18126,"tottenham":18127,"oasis":18128,"ordination":18129,"vulnerability":18130,"presiding":18131,"corey":18132,"cp":18133,"penguins":18134,"sims":18135,"##pis":18136,"malawi":18137,"piss":18138,"##48":18139,"correction":18140,"##cked":18141,"##ffle":18142,"##ryn":18143,"countdown":18144,"detectives":18145,"psychiatrist":18146,"psychedelic":18147,"dinosaurs":18148,"blouse":18149,"##get":18150,"choi":18151,"vowed":18152,"##oz":18153,"randomly":18154,"##pol":18155,"49ers":18156,"scrub":18157,"blanche":18158,"bruins":18159,"dusseldorf":18160,"##using":18161,"unwanted":18162,"##ums":18163,"212":18164,"dominique":18165,"elevations":18166,"headlights":18167,"om":18168,"laguna":18169,"##oga":18170,"1750":18171,"famously":18172,"ignorance":18173,"shrewsbury":18174,"##aine":18175,"ajax":18176,"breuning":18177,"che":18178,"confederacy":18179,"greco":18180,"overhaul":18181,"##screen":18182,"paz":18183,"skirts":18184,"disagreement":18185,"cruelty":18186,"jagged":18187,"phoebe":18188,"shifter":18189,"hovered":18190,"viruses":18191,"##wes":18192,"mandy":18193,"##lined":18194,"##gc":18195,"landlord":18196,"squirrel":18197,"dashed":18198,"##ι":18199,"ornamental":18200,"gag":18201,"wally":18202,"grange":18203,"literal":18204,"spurs":18205,"undisclosed":18206,"proceeding":18207,"yin":18208,"##text":18209,"billie":18210,"orphan":18211,"spanned":18212,"humidity":18213,"indy":18214,"weighted":18215,"presentations":18216,"explosions":18217,"lucian":18218,"##tary":18219,"vaughn":18220,"hindus":18221,"##anga":18222,"##hell":18223,"psycho":18224,"171":18225,"daytona":18226,"protects":18227,"efficiently":18228,"rematch":18229,"sly":18230,"tandem":18231,"##oya":18232,"rebranded":18233,"impaired":18234,"hee":18235,"metropolis":18236,"peach":18237,"godfrey":18238,"diaspora":18239,"ethnicity":18240,"prosperous":18241,"gleaming":18242,"dar":18243,"grossing":18244,"playback":18245,"##rden":18246,"stripe":18247,"pistols":18248,"##tain":18249,"births":18250,"labelled":18251,"##cating":18252,"172":18253,"rudy":18254,"alba":18255,"##onne":18256,"aquarium":18257,"hostility":18258,"##gb":18259,"##tase":18260,"shudder":18261,"sumatra":18262,"hardest":18263,"lakers":18264,"consonant":18265,"creeping":18266,"dem
os":18267,"homicide":18268,"capsule":18269,"zeke":18270,"liberties":18271,"expulsion":18272,"pueblo":18273,"##comb":18274,"trait":18275,"transporting":18276,"##ddin":18277,"##neck":18278,"##yna":18279,"depart":18280,"gregg":18281,"mold":18282,"ledge":18283,"hangar":18284,"oldham":18285,"playboy":18286,"termination":18287,"analysts":18288,"gmbh":18289,"romero":18290,"##itic":18291,"insist":18292,"cradle":18293,"filthy":18294,"brightness":18295,"slash":18296,"shootout":18297,"deposed":18298,"bordering":18299,"##truct":18300,"isis":18301,"microwave":18302,"tumbled":18303,"sheltered":18304,"cathy":18305,"werewolves":18306,"messy":18307,"andersen":18308,"convex":18309,"clapped":18310,"clinched":18311,"satire":18312,"wasting":18313,"edo":18314,"vc":18315,"rufus":18316,"##jak":18317,"mont":18318,"##etti":18319,"poznan":18320,"##keeping":18321,"restructuring":18322,"transverse":18323,"##rland":18324,"azerbaijani":18325,"slovene":18326,"gestures":18327,"roommate":18328,"choking":18329,"shear":18330,"##quist":18331,"vanguard":18332,"oblivious":18333,"##hiro":18334,"disagreed":18335,"baptism":18336,"##lich":18337,"coliseum":18338,"##aceae":18339,"salvage":18340,"societe":18341,"cory":18342,"locke":18343,"relocation":18344,"relying":18345,"versailles":18346,"ahl":18347,"swelling":18348,"##elo":18349,"cheerful":18350,"##word":18351,"##edes":18352,"gin":18353,"sarajevo":18354,"obstacle":18355,"diverted":18356,"##nac":18357,"messed":18358,"thoroughbred":18359,"fluttered":18360,"utrecht":18361,"chewed":18362,"acquaintance":18363,"assassins":18364,"dispatch":18365,"mirza":18366,"##wart":18367,"nike":18368,"salzburg":18369,"swell":18370,"yen":18371,"##gee":18372,"idle":18373,"ligue":18374,"samson":18375,"##nds":18376,"##igh":18377,"playful":18378,"spawned":18379,"##cise":18380,"tease":18381,"##case":18382,"burgundy":18383,"##bot":18384,"stirring":18385,"skeptical":18386,"interceptions":18387,"marathi":18388,"##dies":18389,"bedrooms":18390,"aroused":18391,"pinch":18392,"##lik":18393,"preferences":18394,"tattoos":18395,"buster":18396,"digitally":18397,"projecting":18398,"rust":18399,"##ital":18400,"kitten":18401,"priorities":18402,"addison":18403,"pseudo":18404,"##guard":18405,"dusk":18406,"icons":18407,"sermon":18408,"##psis":18409,"##iba":18410,"bt":18411,"##lift":18412,"##xt":18413,"ju":18414,"truce":18415,"rink":18416,"##dah":18417,"##wy":18418,"defects":18419,"psychiatry":18420,"offences":18421,"calculate":18422,"glucose":18423,"##iful":18424,"##rized":18425,"##unda":18426,"francaise":18427,"##hari":18428,"richest":18429,"warwickshire":18430,"carly":18431,"1763":18432,"purity":18433,"redemption":18434,"lending":18435,"##cious":18436,"muse":18437,"bruises":18438,"cerebral":18439,"aero":18440,"carving":18441,"##name":18442,"preface":18443,"terminology":18444,"invade":18445,"monty":18446,"##int":18447,"anarchist":18448,"blurred":18449,"##iled":18450,"rossi":18451,"treats":18452,"guts":18453,"shu":18454,"foothills":18455,"ballads":18456,"undertaking":18457,"premise":18458,"cecilia":18459,"affiliates":18460,"blasted":18461,"conditional":18462,"wilder":18463,"minors":18464,"drone":18465,"rudolph":18466,"buffy":18467,"swallowing":18468,"horton":18469,"attested":18470,"##hop":18471,"rutherford":18472,"howell":18473,"primetime":18474,"livery":18475,"penal":18476,"##bis":18477,"minimize":18478,"hydro":18479,"wrecked":18480,"wrought":18481,"palazzo":18482,"##gling":18483,"cans":18484,"vernacular":18485,"friedman":18486,"nobleman":18487,"shale":18488,"walnut":18489,"danielle":18490,"##ection":18491,"##tley":18492,"se
ars":18493,"##kumar":18494,"chords":18495,"lend":18496,"flipping":18497,"streamed":18498,"por":18499,"dracula":18500,"gallons":18501,"sacrifices":18502,"gamble":18503,"orphanage":18504,"##iman":18505,"mckenzie":18506,"##gible":18507,"boxers":18508,"daly":18509,"##balls":18510,"##ان":18511,"208":18512,"##ific":18513,"##rative":18514,"##iq":18515,"exploited":18516,"slated":18517,"##uity":18518,"circling":18519,"hillary":18520,"pinched":18521,"goldberg":18522,"provost":18523,"campaigning":18524,"lim":18525,"piles":18526,"ironically":18527,"jong":18528,"mohan":18529,"successors":18530,"usaf":18531,"##tem":18532,"##ught":18533,"autobiographical":18534,"haute":18535,"preserves":18536,"##ending":18537,"acquitted":18538,"comparisons":18539,"203":18540,"hydroelectric":18541,"gangs":18542,"cypriot":18543,"torpedoes":18544,"rushes":18545,"chrome":18546,"derive":18547,"bumps":18548,"instability":18549,"fiat":18550,"pets":18551,"##mbe":18552,"silas":18553,"dye":18554,"reckless":18555,"settler":18556,"##itation":18557,"info":18558,"heats":18559,"##writing":18560,"176":18561,"canonical":18562,"maltese":18563,"fins":18564,"mushroom":18565,"stacy":18566,"aspen":18567,"avid":18568,"##kur":18569,"##loading":18570,"vickers":18571,"gaston":18572,"hillside":18573,"statutes":18574,"wilde":18575,"gail":18576,"kung":18577,"sabine":18578,"comfortably":18579,"motorcycles":18580,"##rgo":18581,"169":18582,"pneumonia":18583,"fetch":18584,"##sonic":18585,"axel":18586,"faintly":18587,"parallels":18588,"##oop":18589,"mclaren":18590,"spouse":18591,"compton":18592,"interdisciplinary":18593,"miner":18594,"##eni":18595,"181":18596,"clamped":18597,"##chal":18598,"##llah":18599,"separates":18600,"versa":18601,"##mler":18602,"scarborough":18603,"labrador":18604,"##lity":18605,"##osing":18606,"rutgers":18607,"hurdles":18608,"como":18609,"166":18610,"burt":18611,"divers":18612,"##100":18613,"wichita":18614,"cade":18615,"coincided":18616,"##erson":18617,"bruised":18618,"mla":18619,"##pper":18620,"vineyard":18621,"##ili":18622,"##brush":18623,"notch":18624,"mentioning":18625,"jase":18626,"hearted":18627,"kits":18628,"doe":18629,"##acle":18630,"pomerania":18631,"##ady":18632,"ronan":18633,"seizure":18634,"pavel":18635,"problematic":18636,"##zaki":18637,"domenico":18638,"##ulin":18639,"catering":18640,"penelope":18641,"dependence":18642,"parental":18643,"emilio":18644,"ministerial":18645,"atkinson":18646,"##bolic":18647,"clarkson":18648,"chargers":18649,"colby":18650,"grill":18651,"peeked":18652,"arises":18653,"summon":18654,"##aged":18655,"fools":18656,"##grapher":18657,"faculties":18658,"qaeda":18659,"##vial":18660,"garner":18661,"refurbished":18662,"##hwa":18663,"geelong":18664,"disasters":18665,"nudged":18666,"bs":18667,"shareholder":18668,"lori":18669,"algae":18670,"reinstated":18671,"rot":18672,"##ades":18673,"##nous":18674,"invites":18675,"stainless":18676,"183":18677,"inclusive":18678,"##itude":18679,"diocesan":18680,"til":18681,"##icz":18682,"denomination":18683,"##xa":18684,"benton":18685,"floral":18686,"registers":18687,"##ider":18688,"##erman":18689,"##kell":18690,"absurd":18691,"brunei":18692,"guangzhou":18693,"hitter":18694,"retaliation":18695,"##uled":18696,"##eve":18697,"blanc":18698,"nh":18699,"consistency":18700,"contamination":18701,"##eres":18702,"##rner":18703,"dire":18704,"palermo":18705,"broadcasters":18706,"diaries":18707,"inspire":18708,"vols":18709,"brewer":18710,"tightening":18711,"ky":18712,"mixtape":18713,"hormone":18714,"##tok":18715,"stokes":18716,"##color":18717,"##dly":18718,"##ssi":18719,"pg":18720,"##
ometer":18721,"##lington":18722,"sanitation":18723,"##tility":18724,"intercontinental":18725,"apps":18726,"##adt":18727,"¹⁄₂":18728,"cylinders":18729,"economies":18730,"favourable":18731,"unison":18732,"croix":18733,"gertrude":18734,"odyssey":18735,"vanity":18736,"dangling":18737,"##logists":18738,"upgrades":18739,"dice":18740,"middleweight":18741,"practitioner":18742,"##ight":18743,"206":18744,"henrik":18745,"parlor":18746,"orion":18747,"angered":18748,"lac":18749,"python":18750,"blurted":18751,"##rri":18752,"sensual":18753,"intends":18754,"swings":18755,"angled":18756,"##phs":18757,"husky":18758,"attain":18759,"peerage":18760,"precinct":18761,"textiles":18762,"cheltenham":18763,"shuffled":18764,"dai":18765,"confess":18766,"tasting":18767,"bhutan":18768,"##riation":18769,"tyrone":18770,"segregation":18771,"abrupt":18772,"ruiz":18773,"##rish":18774,"smirked":18775,"blackwell":18776,"confidential":18777,"browning":18778,"amounted":18779,"##put":18780,"vase":18781,"scarce":18782,"fabulous":18783,"raided":18784,"staple":18785,"guyana":18786,"unemployed":18787,"glider":18788,"shay":18789,"##tow":18790,"carmine":18791,"troll":18792,"intervene":18793,"squash":18794,"superstar":18795,"##uce":18796,"cylindrical":18797,"len":18798,"roadway":18799,"researched":18800,"handy":18801,"##rium":18802,"##jana":18803,"meta":18804,"lao":18805,"declares":18806,"##rring":18807,"##tadt":18808,"##elin":18809,"##kova":18810,"willem":18811,"shrubs":18812,"napoleonic":18813,"realms":18814,"skater":18815,"qi":18816,"volkswagen":18817,"##ł":18818,"tad":18819,"hara":18820,"archaeologist":18821,"awkwardly":18822,"eerie":18823,"##kind":18824,"wiley":18825,"##heimer":18826,"##24":18827,"titus":18828,"organizers":18829,"cfl":18830,"crusaders":18831,"lama":18832,"usb":18833,"vent":18834,"enraged":18835,"thankful":18836,"occupants":18837,"maximilian":18838,"##gaard":18839,"possessing":18840,"textbooks":18841,"##oran":18842,"collaborator":18843,"quaker":18844,"##ulo":18845,"avalanche":18846,"mono":18847,"silky":18848,"straits":18849,"isaiah":18850,"mustang":18851,"surged":18852,"resolutions":18853,"potomac":18854,"descend":18855,"cl":18856,"kilograms":18857,"plato":18858,"strains":18859,"saturdays":18860,"##olin":18861,"bernstein":18862,"##ype":18863,"holstein":18864,"ponytail":18865,"##watch":18866,"belize":18867,"conversely":18868,"heroine":18869,"perpetual":18870,"##ylus":18871,"charcoal":18872,"piedmont":18873,"glee":18874,"negotiating":18875,"backdrop":18876,"prologue":18877,"##jah":18878,"##mmy":18879,"pasadena":18880,"climbs":18881,"ramos":18882,"sunni":18883,"##holm":18884,"##tner":18885,"##tri":18886,"anand":18887,"deficiency":18888,"hertfordshire":18889,"stout":18890,"##avi":18891,"aperture":18892,"orioles":18893,"##irs":18894,"doncaster":18895,"intrigued":18896,"bombed":18897,"coating":18898,"otis":18899,"##mat":18900,"cocktail":18901,"##jit":18902,"##eto":18903,"amir":18904,"arousal":18905,"sar":18906,"##proof":18907,"##act":18908,"##ories":18909,"dixie":18910,"pots":18911,"##bow":18912,"whereabouts":18913,"159":18914,"##fted":18915,"drains":18916,"bullying":18917,"cottages":18918,"scripture":18919,"coherent":18920,"fore":18921,"poe":18922,"appetite":18923,"##uration":18924,"sampled":18925,"##ators":18926,"##dp":18927,"derrick":18928,"rotor":18929,"jays":18930,"peacock":18931,"installment":18932,"##rro":18933,"advisors":18934,"##coming":18935,"rodeo":18936,"scotch":18937,"##mot":18938,"##db":18939,"##fen":18940,"##vant":18941,"ensued":18942,"rodrigo":18943,"dictatorship":18944,"martyrs":18945,"twenties":18946,"##н
":18947,"towed":18948,"incidence":18949,"marta":18950,"rainforest":18951,"sai":18952,"scaled":18953,"##cles":18954,"oceanic":18955,"qualifiers":18956,"symphonic":18957,"mcbride":18958,"dislike":18959,"generalized":18960,"aubrey":18961,"colonization":18962,"##iation":18963,"##lion":18964,"##ssing":18965,"disliked":18966,"lublin":18967,"salesman":18968,"##ulates":18969,"spherical":18970,"whatsoever":18971,"sweating":18972,"avalon":18973,"contention":18974,"punt":18975,"severity":18976,"alderman":18977,"atari":18978,"##dina":18979,"##grant":18980,"##rop":18981,"scarf":18982,"seville":18983,"vertices":18984,"annexation":18985,"fairfield":18986,"fascination":18987,"inspiring":18988,"launches":18989,"palatinate":18990,"regretted":18991,"##rca":18992,"feral":18993,"##iom":18994,"elk":18995,"nap":18996,"olsen":18997,"reddy":18998,"yong":18999,"##leader":19000,"##iae":19001,"garment":19002,"transports":19003,"feng":19004,"gracie":19005,"outrage":19006,"viceroy":19007,"insides":19008,"##esis":19009,"breakup":19010,"grady":19011,"organizer":19012,"softer":19013,"grimaced":19014,"222":19015,"murals":19016,"galicia":19017,"arranging":19018,"vectors":19019,"##rsten":19020,"bas":19021,"##sb":19022,"##cens":19023,"sloan":19024,"##eka":19025,"bitten":19026,"ara":19027,"fender":19028,"nausea":19029,"bumped":19030,"kris":19031,"banquet":19032,"comrades":19033,"detector":19034,"persisted":19035,"##llan":19036,"adjustment":19037,"endowed":19038,"cinemas":19039,"##shot":19040,"sellers":19041,"##uman":19042,"peek":19043,"epa":19044,"kindly":19045,"neglect":19046,"simpsons":19047,"talon":19048,"mausoleum":19049,"runaway":19050,"hangul":19051,"lookout":19052,"##cic":19053,"rewards":19054,"coughed":19055,"acquainted":19056,"chloride":19057,"##ald":19058,"quicker":19059,"accordion":19060,"neolithic":19061,"##qa":19062,"artemis":19063,"coefficient":19064,"lenny":19065,"pandora":19066,"tx":19067,"##xed":19068,"ecstasy":19069,"litter":19070,"segunda":19071,"chairperson":19072,"gemma":19073,"hiss":19074,"rumor":19075,"vow":19076,"nasal":19077,"antioch":19078,"compensate":19079,"patiently":19080,"transformers":19081,"##eded":19082,"judo":19083,"morrow":19084,"penis":19085,"posthumous":19086,"philips":19087,"bandits":19088,"husbands":19089,"denote":19090,"flaming":19091,"##any":19092,"##phones":19093,"langley":19094,"yorker":19095,"1760":19096,"walters":19097,"##uo":19098,"##kle":19099,"gubernatorial":19100,"fatty":19101,"samsung":19102,"leroy":19103,"outlaw":19104,"##nine":19105,"unpublished":19106,"poole":19107,"jakob":19108,"##ᵢ":19109,"##ₙ":19110,"crete":19111,"distorted":19112,"superiority":19113,"##dhi":19114,"intercept":19115,"crust":19116,"mig":19117,"claus":19118,"crashes":19119,"positioning":19120,"188":19121,"stallion":19122,"301":19123,"frontal":19124,"armistice":19125,"##estinal":19126,"elton":19127,"aj":19128,"encompassing":19129,"camel":19130,"commemorated":19131,"malaria":19132,"woodward":19133,"calf":19134,"cigar":19135,"penetrate":19136,"##oso":19137,"willard":19138,"##rno":19139,"##uche":19140,"illustrate":19141,"amusing":19142,"convergence":19143,"noteworthy":19144,"##lma":19145,"##rva":19146,"journeys":19147,"realise":19148,"manfred":19149,"##sable":19150,"410":19151,"##vocation":19152,"hearings":19153,"fiance":19154,"##posed":19155,"educators":19156,"provoked":19157,"adjusting":19158,"##cturing":19159,"modular":19160,"stockton":19161,"paterson":19162,"vlad":19163,"rejects":19164,"electors":19165,"selena":19166,"maureen":19167,"##tres":19168,"uber":19169,"##rce":19170,"swirled":19171,"##num":19172,"pro
portions":19173,"nanny":19174,"pawn":19175,"naturalist":19176,"parma":19177,"apostles":19178,"awoke":19179,"ethel":19180,"wen":19181,"##bey":19182,"monsoon":19183,"overview":19184,"##inating":19185,"mccain":19186,"rendition":19187,"risky":19188,"adorned":19189,"##ih":19190,"equestrian":19191,"germain":19192,"nj":19193,"conspicuous":19194,"confirming":19195,"##yoshi":19196,"shivering":19197,"##imeter":19198,"milestone":19199,"rumours":19200,"flinched":19201,"bounds":19202,"smacked":19203,"token":19204,"##bei":19205,"lectured":19206,"automobiles":19207,"##shore":19208,"impacted":19209,"##iable":19210,"nouns":19211,"nero":19212,"##leaf":19213,"ismail":19214,"prostitute":19215,"trams":19216,"##lace":19217,"bridget":19218,"sud":19219,"stimulus":19220,"impressions":19221,"reins":19222,"revolves":19223,"##oud":19224,"##gned":19225,"giro":19226,"honeymoon":19227,"##swell":19228,"criterion":19229,"##sms":19230,"##uil":19231,"libyan":19232,"prefers":19233,"##osition":19234,"211":19235,"preview":19236,"sucks":19237,"accusation":19238,"bursts":19239,"metaphor":19240,"diffusion":19241,"tolerate":19242,"faye":19243,"betting":19244,"cinematographer":19245,"liturgical":19246,"specials":19247,"bitterly":19248,"humboldt":19249,"##ckle":19250,"flux":19251,"rattled":19252,"##itzer":19253,"archaeologists":19254,"odor":19255,"authorised":19256,"marshes":19257,"discretion":19258,"##ов":19259,"alarmed":19260,"archaic":19261,"inverse":19262,"##leton":19263,"explorers":19264,"##pine":19265,"drummond":19266,"tsunami":19267,"woodlands":19268,"##minate":19269,"##tland":19270,"booklet":19271,"insanity":19272,"owning":19273,"insert":19274,"crafted":19275,"calculus":19276,"##tore":19277,"receivers":19278,"##bt":19279,"stung":19280,"##eca":19281,"##nched":19282,"prevailing":19283,"travellers":19284,"eyeing":19285,"lila":19286,"graphs":19287,"##borne":19288,"178":19289,"julien":19290,"##won":19291,"morale":19292,"adaptive":19293,"therapist":19294,"erica":19295,"cw":19296,"libertarian":19297,"bowman":19298,"pitches":19299,"vita":19300,"##ional":19301,"crook":19302,"##ads":19303,"##entation":19304,"caledonia":19305,"mutiny":19306,"##sible":19307,"1840s":19308,"automation":19309,"##ß":19310,"flock":19311,"##pia":19312,"ironic":19313,"pathology":19314,"##imus":19315,"remarried":19316,"##22":19317,"joker":19318,"withstand":19319,"energies":19320,"##att":19321,"shropshire":19322,"hostages":19323,"madeleine":19324,"tentatively":19325,"conflicting":19326,"mateo":19327,"recipes":19328,"euros":19329,"ol":19330,"mercenaries":19331,"nico":19332,"##ndon":19333,"albuquerque":19334,"augmented":19335,"mythical":19336,"bel":19337,"freud":19338,"##child":19339,"cough":19340,"##lica":19341,"365":19342,"freddy":19343,"lillian":19344,"genetically":19345,"nuremberg":19346,"calder":19347,"209":19348,"bonn":19349,"outdoors":19350,"paste":19351,"suns":19352,"urgency":19353,"vin":19354,"restraint":19355,"tyson":19356,"##cera":19357,"##selle":19358,"barrage":19359,"bethlehem":19360,"kahn":19361,"##par":19362,"mounts":19363,"nippon":19364,"barony":19365,"happier":19366,"ryu":19367,"makeshift":19368,"sheldon":19369,"blushed":19370,"castillo":19371,"barking":19372,"listener":19373,"taped":19374,"bethel":19375,"fluent":19376,"headlines":19377,"pornography":19378,"rum":19379,"disclosure":19380,"sighing":19381,"mace":19382,"doubling":19383,"gunther":19384,"manly":19385,"##plex":19386,"rt":19387,"interventions":19388,"physiological":19389,"forwards":19390,"emerges":19391,"##tooth":19392,"##gny":19393,"compliment":19394,"rib":19395,"recession":19396,"visibly
":19397,"barge":19398,"faults":19399,"connector":19400,"exquisite":19401,"prefect":19402,"##rlin":19403,"patio":19404,"##cured":19405,"elevators":19406,"brandt":19407,"italics":19408,"pena":19409,"173":19410,"wasp":19411,"satin":19412,"ea":19413,"botswana":19414,"graceful":19415,"respectable":19416,"##jima":19417,"##rter":19418,"##oic":19419,"franciscan":19420,"generates":19421,"##dl":19422,"alfredo":19423,"disgusting":19424,"##olate":19425,"##iously":19426,"sherwood":19427,"warns":19428,"cod":19429,"promo":19430,"cheryl":19431,"sino":19432,"##ة":19433,"##escu":19434,"twitch":19435,"##zhi":19436,"brownish":19437,"thom":19438,"ortiz":19439,"##dron":19440,"densely":19441,"##beat":19442,"carmel":19443,"reinforce":19444,"##bana":19445,"187":19446,"anastasia":19447,"downhill":19448,"vertex":19449,"contaminated":19450,"remembrance":19451,"harmonic":19452,"homework":19453,"##sol":19454,"fiancee":19455,"gears":19456,"olds":19457,"angelica":19458,"loft":19459,"ramsay":19460,"quiz":19461,"colliery":19462,"sevens":19463,"##cape":19464,"autism":19465,"##hil":19466,"walkway":19467,"##boats":19468,"ruben":19469,"abnormal":19470,"ounce":19471,"khmer":19472,"##bbe":19473,"zachary":19474,"bedside":19475,"morphology":19476,"punching":19477,"##olar":19478,"sparrow":19479,"convinces":19480,"##35":19481,"hewitt":19482,"queer":19483,"remastered":19484,"rods":19485,"mabel":19486,"solemn":19487,"notified":19488,"lyricist":19489,"symmetric":19490,"##xide":19491,"174":19492,"encore":19493,"passports":19494,"wildcats":19495,"##uni":19496,"baja":19497,"##pac":19498,"mildly":19499,"##ease":19500,"bleed":19501,"commodity":19502,"mounds":19503,"glossy":19504,"orchestras":19505,"##omo":19506,"damian":19507,"prelude":19508,"ambitions":19509,"##vet":19510,"awhile":19511,"remotely":19512,"##aud":19513,"asserts":19514,"imply":19515,"##iques":19516,"distinctly":19517,"modelling":19518,"remedy":19519,"##dded":19520,"windshield":19521,"dani":19522,"xiao":19523,"##endra":19524,"audible":19525,"powerplant":19526,"1300":19527,"invalid":19528,"elemental":19529,"acquisitions":19530,"##hala":19531,"immaculate":19532,"libby":19533,"plata":19534,"smuggling":19535,"ventilation":19536,"denoted":19537,"minh":19538,"##morphism":19539,"430":19540,"differed":19541,"dion":19542,"kelley":19543,"lore":19544,"mocking":19545,"sabbath":19546,"spikes":19547,"hygiene":19548,"drown":19549,"runoff":19550,"stylized":19551,"tally":19552,"liberated":19553,"aux":19554,"interpreter":19555,"righteous":19556,"aba":19557,"siren":19558,"reaper":19559,"pearce":19560,"millie":19561,"##cier":19562,"##yra":19563,"gaius":19564,"##iso":19565,"captures":19566,"##ttering":19567,"dorm":19568,"claudio":19569,"##sic":19570,"benches":19571,"knighted":19572,"blackness":19573,"##ored":19574,"discount":19575,"fumble":19576,"oxidation":19577,"routed":19578,"##ς":19579,"novak":19580,"perpendicular":19581,"spoiled":19582,"fracture":19583,"splits":19584,"##urt":19585,"pads":19586,"topology":19587,"##cats":19588,"axes":19589,"fortunate":19590,"offenders":19591,"protestants":19592,"esteem":19593,"221":19594,"broadband":19595,"convened":19596,"frankly":19597,"hound":19598,"prototypes":19599,"isil":19600,"facilitated":19601,"keel":19602,"##sher":19603,"sahara":19604,"awaited":19605,"bubba":19606,"orb":19607,"prosecutors":19608,"186":19609,"hem":19610,"520":19611,"##xing":19612,"relaxing":19613,"remnant":19614,"romney":19615,"sorted":19616,"slalom":19617,"stefano":19618,"ulrich":19619,"##active":19620,"exemption":19621,"folder":19622,"pauses":19623,"foliage":19624,"hitchcock":19625,"e
pithet":19626,"204":19627,"criticisms":19628,"##aca":19629,"ballistic":19630,"brody":19631,"hinduism":19632,"chaotic":19633,"youths":19634,"equals":19635,"##pala":19636,"pts":19637,"thicker":19638,"analogous":19639,"capitalist":19640,"improvised":19641,"overseeing":19642,"sinatra":19643,"ascended":19644,"beverage":19645,"##tl":19646,"straightforward":19647,"##kon":19648,"curran":19649,"##west":19650,"bois":19651,"325":19652,"induce":19653,"surveying":19654,"emperors":19655,"sax":19656,"unpopular":19657,"##kk":19658,"cartoonist":19659,"fused":19660,"##mble":19661,"unto":19662,"##yuki":19663,"localities":19664,"##cko":19665,"##ln":19666,"darlington":19667,"slain":19668,"academie":19669,"lobbying":19670,"sediment":19671,"puzzles":19672,"##grass":19673,"defiance":19674,"dickens":19675,"manifest":19676,"tongues":19677,"alumnus":19678,"arbor":19679,"coincide":19680,"184":19681,"appalachian":19682,"mustafa":19683,"examiner":19684,"cabaret":19685,"traumatic":19686,"yves":19687,"bracelet":19688,"draining":19689,"heroin":19690,"magnum":19691,"baths":19692,"odessa":19693,"consonants":19694,"mitsubishi":19695,"##gua":19696,"kellan":19697,"vaudeville":19698,"##fr":19699,"joked":19700,"null":19701,"straps":19702,"probation":19703,"##ław":19704,"ceded":19705,"interfaces":19706,"##pas":19707,"##zawa":19708,"blinding":19709,"viet":19710,"224":19711,"rothschild":19712,"museo":19713,"640":19714,"huddersfield":19715,"##vr":19716,"tactic":19717,"##storm":19718,"brackets":19719,"dazed":19720,"incorrectly":19721,"##vu":19722,"reg":19723,"glazed":19724,"fearful":19725,"manifold":19726,"benefited":19727,"irony":19728,"##sun":19729,"stumbling":19730,"##rte":19731,"willingness":19732,"balkans":19733,"mei":19734,"wraps":19735,"##aba":19736,"injected":19737,"##lea":19738,"gu":19739,"syed":19740,"harmless":19741,"##hammer":19742,"bray":19743,"takeoff":19744,"poppy":19745,"timor":19746,"cardboard":19747,"astronaut":19748,"purdue":19749,"weeping":19750,"southbound":19751,"cursing":19752,"stalls":19753,"diagonal":19754,"##neer":19755,"lamar":19756,"bryce":19757,"comte":19758,"weekdays":19759,"harrington":19760,"##uba":19761,"negatively":19762,"##see":19763,"lays":19764,"grouping":19765,"##cken":19766,"##henko":19767,"affirmed":19768,"halle":19769,"modernist":19770,"##lai":19771,"hodges":19772,"smelling":19773,"aristocratic":19774,"baptized":19775,"dismiss":19776,"justification":19777,"oilers":19778,"##now":19779,"coupling":19780,"qin":19781,"snack":19782,"healer":19783,"##qing":19784,"gardener":19785,"layla":19786,"battled":19787,"formulated":19788,"stephenson":19789,"gravitational":19790,"##gill":19791,"##jun":19792,"1768":19793,"granny":19794,"coordinating":19795,"suites":19796,"##cd":19797,"##ioned":19798,"monarchs":19799,"##cote":19800,"##hips":19801,"sep":19802,"blended":19803,"apr":19804,"barrister":19805,"deposition":19806,"fia":19807,"mina":19808,"policemen":19809,"paranoid":19810,"##pressed":19811,"churchyard":19812,"covert":19813,"crumpled":19814,"creep":19815,"abandoning":19816,"tr":19817,"transmit":19818,"conceal":19819,"barr":19820,"understands":19821,"readiness":19822,"spire":19823,"##cology":19824,"##enia":19825,"##erry":19826,"610":19827,"startling":19828,"unlock":19829,"vida":19830,"bowled":19831,"slots":19832,"##nat":19833,"##islav":19834,"spaced":19835,"trusting":19836,"admire":19837,"rig":19838,"##ink":19839,"slack":19840,"##70":19841,"mv":19842,"207":19843,"casualty":19844,"##wei":19845,"classmates":19846,"##odes":19847,"##rar":19848,"##rked":19849,"amherst":19850,"furnished":19851,"evolve":19852,"foun
dry":19853,"menace":19854,"mead":19855,"##lein":19856,"flu":19857,"wesleyan":19858,"##kled":19859,"monterey":19860,"webber":19861,"##vos":19862,"wil":19863,"##mith":19864,"##на":19865,"bartholomew":19866,"justices":19867,"restrained":19868,"##cke":19869,"amenities":19870,"191":19871,"mediated":19872,"sewage":19873,"trenches":19874,"ml":19875,"mainz":19876,"##thus":19877,"1800s":19878,"##cula":19879,"##inski":19880,"caine":19881,"bonding":19882,"213":19883,"converts":19884,"spheres":19885,"superseded":19886,"marianne":19887,"crypt":19888,"sweaty":19889,"ensign":19890,"historia":19891,"##br":19892,"spruce":19893,"##post":19894,"##ask":19895,"forks":19896,"thoughtfully":19897,"yukon":19898,"pamphlet":19899,"ames":19900,"##uter":19901,"karma":19902,"##yya":19903,"bryn":19904,"negotiation":19905,"sighs":19906,"incapable":19907,"##mbre":19908,"##ntial":19909,"actresses":19910,"taft":19911,"##mill":19912,"luce":19913,"prevailed":19914,"##amine":19915,"1773":19916,"motionless":19917,"envoy":19918,"testify":19919,"investing":19920,"sculpted":19921,"instructors":19922,"provence":19923,"kali":19924,"cullen":19925,"horseback":19926,"##while":19927,"goodwin":19928,"##jos":19929,"gaa":19930,"norte":19931,"##ldon":19932,"modify":19933,"wavelength":19934,"abd":19935,"214":19936,"skinned":19937,"sprinter":19938,"forecast":19939,"scheduling":19940,"marries":19941,"squared":19942,"tentative":19943,"##chman":19944,"boer":19945,"##isch":19946,"bolts":19947,"swap":19948,"fisherman":19949,"assyrian":19950,"impatiently":19951,"guthrie":19952,"martins":19953,"murdoch":19954,"194":19955,"tanya":19956,"nicely":19957,"dolly":19958,"lacy":19959,"med":19960,"##45":19961,"syn":19962,"decks":19963,"fashionable":19964,"millionaire":19965,"##ust":19966,"surfing":19967,"##ml":19968,"##ision":19969,"heaved":19970,"tammy":19971,"consulate":19972,"attendees":19973,"routinely":19974,"197":19975,"fuse":19976,"saxophonist":19977,"backseat":19978,"malaya":19979,"##lord":19980,"scowl":19981,"tau":19982,"##ishly":19983,"193":19984,"sighted":19985,"steaming":19986,"##rks":19987,"303":19988,"911":19989,"##holes":19990,"##hong":19991,"ching":19992,"##wife":19993,"bless":19994,"conserved":19995,"jurassic":19996,"stacey":19997,"unix":19998,"zion":19999,"chunk":20000,"rigorous":20001,"blaine":20002,"198":20003,"peabody":20004,"slayer":20005,"dismay":20006,"brewers":20007,"nz":20008,"##jer":20009,"det":20010,"##glia":20011,"glover":20012,"postwar":20013,"int":20014,"penetration":20015,"sylvester":20016,"imitation":20017,"vertically":20018,"airlift":20019,"heiress":20020,"knoxville":20021,"viva":20022,"##uin":20023,"390":20024,"macon":20025,"##rim":20026,"##fighter":20027,"##gonal":20028,"janice":20029,"##orescence":20030,"##wari":20031,"marius":20032,"belongings":20033,"leicestershire":20034,"196":20035,"blanco":20036,"inverted":20037,"preseason":20038,"sanity":20039,"sobbing":20040,"##due":20041,"##elt":20042,"##dled":20043,"collingwood":20044,"regeneration":20045,"flickering":20046,"shortest":20047,"##mount":20048,"##osi":20049,"feminism":20050,"##lat":20051,"sherlock":20052,"cabinets":20053,"fumbled":20054,"northbound":20055,"precedent":20056,"snaps":20057,"##mme":20058,"researching":20059,"##akes":20060,"guillaume":20061,"insights":20062,"manipulated":20063,"vapor":20064,"neighbour":20065,"sap":20066,"gangster":20067,"frey":20068,"f1":20069,"stalking":20070,"scarcely":20071,"callie":20072,"barnett":20073,"tendencies":20074,"audi":20075,"doomed":20076,"assessing":20077,"slung":20078,"panchayat":20079,"ambiguous":20080,"bartlett":20081,"#
#etto":20082,"distributing":20083,"violating":20084,"wolverhampton":20085,"##hetic":20086,"swami":20087,"histoire":20088,"##urus":20089,"liable":20090,"pounder":20091,"groin":20092,"hussain":20093,"larsen":20094,"popping":20095,"surprises":20096,"##atter":20097,"vie":20098,"curt":20099,"##station":20100,"mute":20101,"relocate":20102,"musicals":20103,"authorization":20104,"richter":20105,"##sef":20106,"immortality":20107,"tna":20108,"bombings":20109,"##press":20110,"deteriorated":20111,"yiddish":20112,"##acious":20113,"robbed":20114,"colchester":20115,"cs":20116,"pmid":20117,"ao":20118,"verified":20119,"balancing":20120,"apostle":20121,"swayed":20122,"recognizable":20123,"oxfordshire":20124,"retention":20125,"nottinghamshire":20126,"contender":20127,"judd":20128,"invitational":20129,"shrimp":20130,"uhf":20131,"##icient":20132,"cleaner":20133,"longitudinal":20134,"tanker":20135,"##mur":20136,"acronym":20137,"broker":20138,"koppen":20139,"sundance":20140,"suppliers":20141,"##gil":20142,"4000":20143,"clipped":20144,"fuels":20145,"petite":20146,"##anne":20147,"landslide":20148,"helene":20149,"diversion":20150,"populous":20151,"landowners":20152,"auspices":20153,"melville":20154,"quantitative":20155,"##xes":20156,"ferries":20157,"nicky":20158,"##llus":20159,"doo":20160,"haunting":20161,"roche":20162,"carver":20163,"downed":20164,"unavailable":20165,"##pathy":20166,"approximation":20167,"hiroshima":20168,"##hue":20169,"garfield":20170,"valle":20171,"comparatively":20172,"keyboardist":20173,"traveler":20174,"##eit":20175,"congestion":20176,"calculating":20177,"subsidiaries":20178,"##bate":20179,"serb":20180,"modernization":20181,"fairies":20182,"deepened":20183,"ville":20184,"averages":20185,"##lore":20186,"inflammatory":20187,"tonga":20188,"##itch":20189,"co₂":20190,"squads":20191,"##hea":20192,"gigantic":20193,"serum":20194,"enjoyment":20195,"retailer":20196,"verona":20197,"35th":20198,"cis":20199,"##phobic":20200,"magna":20201,"technicians":20202,"##vati":20203,"arithmetic":20204,"##sport":20205,"levin":20206,"##dation":20207,"amtrak":20208,"chow":20209,"sienna":20210,"##eyer":20211,"backstage":20212,"entrepreneurship":20213,"##otic":20214,"learnt":20215,"tao":20216,"##udy":20217,"worcestershire":20218,"formulation":20219,"baggage":20220,"hesitant":20221,"bali":20222,"sabotage":20223,"##kari":20224,"barren":20225,"enhancing":20226,"murmur":20227,"pl":20228,"freshly":20229,"putnam":20230,"syntax":20231,"aces":20232,"medicines":20233,"resentment":20234,"bandwidth":20235,"##sier":20236,"grins":20237,"chili":20238,"guido":20239,"##sei":20240,"framing":20241,"implying":20242,"gareth":20243,"lissa":20244,"genevieve":20245,"pertaining":20246,"admissions":20247,"geo":20248,"thorpe":20249,"proliferation":20250,"sato":20251,"bela":20252,"analyzing":20253,"parting":20254,"##gor":20255,"awakened":20256,"##isman":20257,"huddled":20258,"secrecy":20259,"##kling":20260,"hush":20261,"gentry":20262,"540":20263,"dungeons":20264,"##ego":20265,"coasts":20266,"##utz":20267,"sacrificed":20268,"##chule":20269,"landowner":20270,"mutually":20271,"prevalence":20272,"programmer":20273,"adolescent":20274,"disrupted":20275,"seaside":20276,"gee":20277,"trusts":20278,"vamp":20279,"georgie":20280,"##nesian":20281,"##iol":20282,"schedules":20283,"sindh":20284,"##market":20285,"etched":20286,"hm":20287,"sparse":20288,"bey":20289,"beaux":20290,"scratching":20291,"gliding":20292,"unidentified":20293,"216":20294,"collaborating":20295,"gems":20296,"jesuits":20297,"oro":20298,"accumulation":20299,"shaping":20300,"mbe":20301,"anal":203
02,"##xin":20303,"231":20304,"enthusiasts":20305,"newscast":20306,"##egan":20307,"janata":20308,"dewey":20309,"parkinson":20310,"179":20311,"ankara":20312,"biennial":20313,"towering":20314,"dd":20315,"inconsistent":20316,"950":20317,"##chet":20318,"thriving":20319,"terminate":20320,"cabins":20321,"furiously":20322,"eats":20323,"advocating":20324,"donkey":20325,"marley":20326,"muster":20327,"phyllis":20328,"leiden":20329,"##user":20330,"grassland":20331,"glittering":20332,"iucn":20333,"loneliness":20334,"217":20335,"memorandum":20336,"armenians":20337,"##ddle":20338,"popularized":20339,"rhodesia":20340,"60s":20341,"lame":20342,"##illon":20343,"sans":20344,"bikini":20345,"header":20346,"orbits":20347,"##xx":20348,"##finger":20349,"##ulator":20350,"sharif":20351,"spines":20352,"biotechnology":20353,"strolled":20354,"naughty":20355,"yates":20356,"##wire":20357,"fremantle":20358,"milo":20359,"##mour":20360,"abducted":20361,"removes":20362,"##atin":20363,"humming":20364,"wonderland":20365,"##chrome":20366,"##ester":20367,"hume":20368,"pivotal":20369,"##rates":20370,"armand":20371,"grams":20372,"believers":20373,"elector":20374,"rte":20375,"apron":20376,"bis":20377,"scraped":20378,"##yria":20379,"endorsement":20380,"initials":20381,"##llation":20382,"eps":20383,"dotted":20384,"hints":20385,"buzzing":20386,"emigration":20387,"nearer":20388,"##tom":20389,"indicators":20390,"##ulu":20391,"coarse":20392,"neutron":20393,"protectorate":20394,"##uze":20395,"directional":20396,"exploits":20397,"pains":20398,"loire":20399,"1830s":20400,"proponents":20401,"guggenheim":20402,"rabbits":20403,"ritchie":20404,"305":20405,"hectare":20406,"inputs":20407,"hutton":20408,"##raz":20409,"verify":20410,"##ako":20411,"boilers":20412,"longitude":20413,"##lev":20414,"skeletal":20415,"yer":20416,"emilia":20417,"citrus":20418,"compromised":20419,"##gau":20420,"pokemon":20421,"prescription":20422,"paragraph":20423,"eduard":20424,"cadillac":20425,"attire":20426,"categorized":20427,"kenyan":20428,"weddings":20429,"charley":20430,"##bourg":20431,"entertain":20432,"monmouth":20433,"##lles":20434,"nutrients":20435,"davey":20436,"mesh":20437,"incentive":20438,"practised":20439,"ecosystems":20440,"kemp":20441,"subdued":20442,"overheard":20443,"##rya":20444,"bodily":20445,"maxim":20446,"##nius":20447,"apprenticeship":20448,"ursula":20449,"##fight":20450,"lodged":20451,"rug":20452,"silesian":20453,"unconstitutional":20454,"patel":20455,"inspected":20456,"coyote":20457,"unbeaten":20458,"##hak":20459,"34th":20460,"disruption":20461,"convict":20462,"parcel":20463,"##cl":20464,"##nham":20465,"collier":20466,"implicated":20467,"mallory":20468,"##iac":20469,"##lab":20470,"susannah":20471,"winkler":20472,"##rber":20473,"shia":20474,"phelps":20475,"sediments":20476,"graphical":20477,"robotic":20478,"##sner":20479,"adulthood":20480,"mart":20481,"smoked":20482,"##isto":20483,"kathryn":20484,"clarified":20485,"##aran":20486,"divides":20487,"convictions":20488,"oppression":20489,"pausing":20490,"burying":20491,"##mt":20492,"federico":20493,"mathias":20494,"eileen":20495,"##tana":20496,"kite":20497,"hunched":20498,"##acies":20499,"189":20500,"##atz":20501,"disadvantage":20502,"liza":20503,"kinetic":20504,"greedy":20505,"paradox":20506,"yokohama":20507,"dowager":20508,"trunks":20509,"ventured":20510,"##gement":20511,"gupta":20512,"vilnius":20513,"olaf":20514,"##thest":20515,"crimean":20516,"hopper":20517,"##ej":20518,"progressively":20519,"arturo":20520,"mouthed":20521,"arrondissement":20522,"##fusion":20523,"rubin":20524,"simulcast":20525,"oceani
a":20526,"##orum":20527,"##stra":20528,"##rred":20529,"busiest":20530,"intensely":20531,"navigator":20532,"cary":20533,"##vine":20534,"##hini":20535,"##bies":20536,"fife":20537,"rowe":20538,"rowland":20539,"posing":20540,"insurgents":20541,"shafts":20542,"lawsuits":20543,"activate":20544,"conor":20545,"inward":20546,"culturally":20547,"garlic":20548,"265":20549,"##eering":20550,"eclectic":20551,"##hui":20552,"##kee":20553,"##nl":20554,"furrowed":20555,"vargas":20556,"meteorological":20557,"rendezvous":20558,"##aus":20559,"culinary":20560,"commencement":20561,"##dition":20562,"quota":20563,"##notes":20564,"mommy":20565,"salaries":20566,"overlapping":20567,"mule":20568,"##iology":20569,"##mology":20570,"sums":20571,"wentworth":20572,"##isk":20573,"##zione":20574,"mainline":20575,"subgroup":20576,"##illy":20577,"hack":20578,"plaintiff":20579,"verdi":20580,"bulb":20581,"differentiation":20582,"engagements":20583,"multinational":20584,"supplemented":20585,"bertrand":20586,"caller":20587,"regis":20588,"##naire":20589,"##sler":20590,"##arts":20591,"##imated":20592,"blossom":20593,"propagation":20594,"kilometer":20595,"viaduct":20596,"vineyards":20597,"##uate":20598,"beckett":20599,"optimization":20600,"golfer":20601,"songwriters":20602,"seminal":20603,"semitic":20604,"thud":20605,"volatile":20606,"evolving":20607,"ridley":20608,"##wley":20609,"trivial":20610,"distributions":20611,"scandinavia":20612,"jiang":20613,"##ject":20614,"wrestled":20615,"insistence":20616,"##dio":20617,"emphasizes":20618,"napkin":20619,"##ods":20620,"adjunct":20621,"rhyme":20622,"##ricted":20623,"##eti":20624,"hopeless":20625,"surrounds":20626,"tremble":20627,"32nd":20628,"smoky":20629,"##ntly":20630,"oils":20631,"medicinal":20632,"padded":20633,"steer":20634,"wilkes":20635,"219":20636,"255":20637,"concessions":20638,"hue":20639,"uniquely":20640,"blinded":20641,"landon":20642,"yahoo":20643,"##lane":20644,"hendrix":20645,"commemorating":20646,"dex":20647,"specify":20648,"chicks":20649,"##ggio":20650,"intercity":20651,"1400":20652,"morley":20653,"##torm":20654,"highlighting":20655,"##oting":20656,"pang":20657,"oblique":20658,"stalled":20659,"##liner":20660,"flirting":20661,"newborn":20662,"1769":20663,"bishopric":20664,"shaved":20665,"232":20666,"currie":20667,"##ush":20668,"dharma":20669,"spartan":20670,"##ooped":20671,"favorites":20672,"smug":20673,"novella":20674,"sirens":20675,"abusive":20676,"creations":20677,"espana":20678,"##lage":20679,"paradigm":20680,"semiconductor":20681,"sheen":20682,"##rdo":20683,"##yen":20684,"##zak":20685,"nrl":20686,"renew":20687,"##pose":20688,"##tur":20689,"adjutant":20690,"marches":20691,"norma":20692,"##enity":20693,"ineffective":20694,"weimar":20695,"grunt":20696,"##gat":20697,"lordship":20698,"plotting":20699,"expenditure":20700,"infringement":20701,"lbs":20702,"refrain":20703,"av":20704,"mimi":20705,"mistakenly":20706,"postmaster":20707,"1771":20708,"##bara":20709,"ras":20710,"motorsports":20711,"tito":20712,"199":20713,"subjective":20714,"##zza":20715,"bully":20716,"stew":20717,"##kaya":20718,"prescott":20719,"1a":20720,"##raphic":20721,"##zam":20722,"bids":20723,"styling":20724,"paranormal":20725,"reeve":20726,"sneaking":20727,"exploding":20728,"katz":20729,"akbar":20730,"migrant":20731,"syllables":20732,"indefinitely":20733,"##ogical":20734,"destroys":20735,"replaces":20736,"applause":20737,"##phine":20738,"pest":20739,"##fide":20740,"218":20741,"articulated":20742,"bertie":20743,"##thing":20744,"##cars":20745,"##ptic":20746,"courtroom":20747,"crowley":20748,"aesthetics":20749,"cumm
ings":20750,"tehsil":20751,"hormones":20752,"titanic":20753,"dangerously":20754,"##ibe":20755,"stadion":20756,"jaenelle":20757,"auguste":20758,"ciudad":20759,"##chu":20760,"mysore":20761,"partisans":20762,"##sio":20763,"lucan":20764,"philipp":20765,"##aly":20766,"debating":20767,"henley":20768,"interiors":20769,"##rano":20770,"##tious":20771,"homecoming":20772,"beyonce":20773,"usher":20774,"henrietta":20775,"prepares":20776,"weeds":20777,"##oman":20778,"ely":20779,"plucked":20780,"##pire":20781,"##dable":20782,"luxurious":20783,"##aq":20784,"artifact":20785,"password":20786,"pasture":20787,"juno":20788,"maddy":20789,"minsk":20790,"##dder":20791,"##ologies":20792,"##rone":20793,"assessments":20794,"martian":20795,"royalist":20796,"1765":20797,"examines":20798,"##mani":20799,"##rge":20800,"nino":20801,"223":20802,"parry":20803,"scooped":20804,"relativity":20805,"##eli":20806,"##uting":20807,"##cao":20808,"congregational":20809,"noisy":20810,"traverse":20811,"##agawa":20812,"strikeouts":20813,"nickelodeon":20814,"obituary":20815,"transylvania":20816,"binds":20817,"depictions":20818,"polk":20819,"trolley":20820,"##yed":20821,"##lard":20822,"breeders":20823,"##under":20824,"dryly":20825,"hokkaido":20826,"1762":20827,"strengths":20828,"stacks":20829,"bonaparte":20830,"connectivity":20831,"neared":20832,"prostitutes":20833,"stamped":20834,"anaheim":20835,"gutierrez":20836,"sinai":20837,"##zzling":20838,"bram":20839,"fresno":20840,"madhya":20841,"##86":20842,"proton":20843,"##lena":20844,"##llum":20845,"##phon":20846,"reelected":20847,"wanda":20848,"##anus":20849,"##lb":20850,"ample":20851,"distinguishing":20852,"##yler":20853,"grasping":20854,"sermons":20855,"tomato":20856,"bland":20857,"stimulation":20858,"avenues":20859,"##eux":20860,"spreads":20861,"scarlett":20862,"fern":20863,"pentagon":20864,"assert":20865,"baird":20866,"chesapeake":20867,"ir":20868,"calmed":20869,"distortion":20870,"fatalities":20871,"##olis":20872,"correctional":20873,"pricing":20874,"##astic":20875,"##gina":20876,"prom":20877,"dammit":20878,"ying":20879,"collaborate":20880,"##chia":20881,"welterweight":20882,"33rd":20883,"pointer":20884,"substitution":20885,"bonded":20886,"umpire":20887,"communicating":20888,"multitude":20889,"paddle":20890,"##obe":20891,"federally":20892,"intimacy":20893,"##insky":20894,"betray":20895,"ssr":20896,"##lett":20897,"##lean":20898,"##lves":20899,"##therapy":20900,"airbus":20901,"##tery":20902,"functioned":20903,"ud":20904,"bearer":20905,"biomedical":20906,"netflix":20907,"##hire":20908,"##nca":20909,"condom":20910,"brink":20911,"ik":20912,"##nical":20913,"macy":20914,"##bet":20915,"flap":20916,"gma":20917,"experimented":20918,"jelly":20919,"lavender":20920,"##icles":20921,"##ulia":20922,"munro":20923,"##mian":20924,"##tial":20925,"rye":20926,"##rle":20927,"60th":20928,"gigs":20929,"hottest":20930,"rotated":20931,"predictions":20932,"fuji":20933,"bu":20934,"##erence":20935,"##omi":20936,"barangay":20937,"##fulness":20938,"##sas":20939,"clocks":20940,"##rwood":20941,"##liness":20942,"cereal":20943,"roe":20944,"wight":20945,"decker":20946,"uttered":20947,"babu":20948,"onion":20949,"xml":20950,"forcibly":20951,"##df":20952,"petra":20953,"sarcasm":20954,"hartley":20955,"peeled":20956,"storytelling":20957,"##42":20958,"##xley":20959,"##ysis":20960,"##ffa":20961,"fibre":20962,"kiel":20963,"auditor":20964,"fig":20965,"harald":20966,"greenville":20967,"##berries":20968,"geographically":20969,"nell":20970,"quartz":20971,"##athic":20972,"cemeteries":20973,"##lr":20974,"crossings":20975,"nah":20976,"holl
oway":20977,"reptiles":20978,"chun":20979,"sichuan":20980,"snowy":20981,"660":20982,"corrections":20983,"##ivo":20984,"zheng":20985,"ambassadors":20986,"blacksmith":20987,"fielded":20988,"fluids":20989,"hardcover":20990,"turnover":20991,"medications":20992,"melvin":20993,"academies":20994,"##erton":20995,"ro":20996,"roach":20997,"absorbing":20998,"spaniards":20999,"colton":21000,"##founded":21001,"outsider":21002,"espionage":21003,"kelsey":21004,"245":21005,"edible":21006,"##ulf":21007,"dora":21008,"establishes":21009,"##sham":21010,"##tries":21011,"contracting":21012,"##tania":21013,"cinematic":21014,"costello":21015,"nesting":21016,"##uron":21017,"connolly":21018,"duff":21019,"##nology":21020,"mma":21021,"##mata":21022,"fergus":21023,"sexes":21024,"gi":21025,"optics":21026,"spectator":21027,"woodstock":21028,"banning":21029,"##hee":21030,"##fle":21031,"differentiate":21032,"outfielder":21033,"refinery":21034,"226":21035,"312":21036,"gerhard":21037,"horde":21038,"lair":21039,"drastically":21040,"##udi":21041,"landfall":21042,"##cheng":21043,"motorsport":21044,"odi":21045,"##achi":21046,"predominant":21047,"quay":21048,"skins":21049,"##ental":21050,"edna":21051,"harshly":21052,"complementary":21053,"murdering":21054,"##aves":21055,"wreckage":21056,"##90":21057,"ono":21058,"outstretched":21059,"lennox":21060,"munitions":21061,"galen":21062,"reconcile":21063,"470":21064,"scalp":21065,"bicycles":21066,"gillespie":21067,"questionable":21068,"rosenberg":21069,"guillermo":21070,"hostel":21071,"jarvis":21072,"kabul":21073,"volvo":21074,"opium":21075,"yd":21076,"##twined":21077,"abuses":21078,"decca":21079,"outpost":21080,"##cino":21081,"sensible":21082,"neutrality":21083,"##64":21084,"ponce":21085,"anchorage":21086,"atkins":21087,"turrets":21088,"inadvertently":21089,"disagree":21090,"libre":21091,"vodka":21092,"reassuring":21093,"weighs":21094,"##yal":21095,"glide":21096,"jumper":21097,"ceilings":21098,"repertory":21099,"outs":21100,"stain":21101,"##bial":21102,"envy":21103,"##ucible":21104,"smashing":21105,"heightened":21106,"policing":21107,"hyun":21108,"mixes":21109,"lai":21110,"prima":21111,"##ples":21112,"celeste":21113,"##bina":21114,"lucrative":21115,"intervened":21116,"kc":21117,"manually":21118,"##rned":21119,"stature":21120,"staffed":21121,"bun":21122,"bastards":21123,"nairobi":21124,"priced":21125,"##auer":21126,"thatcher":21127,"##kia":21128,"tripped":21129,"comune":21130,"##ogan":21131,"##pled":21132,"brasil":21133,"incentives":21134,"emanuel":21135,"hereford":21136,"musica":21137,"##kim":21138,"benedictine":21139,"biennale":21140,"##lani":21141,"eureka":21142,"gardiner":21143,"rb":21144,"knocks":21145,"sha":21146,"##ael":21147,"##elled":21148,"##onate":21149,"efficacy":21150,"ventura":21151,"masonic":21152,"sanford":21153,"maize":21154,"leverage":21155,"##feit":21156,"capacities":21157,"santana":21158,"##aur":21159,"novelty":21160,"vanilla":21161,"##cter":21162,"##tour":21163,"benin":21164,"##oir":21165,"##rain":21166,"neptune":21167,"drafting":21168,"tallinn":21169,"##cable":21170,"humiliation":21171,"##boarding":21172,"schleswig":21173,"fabian":21174,"bernardo":21175,"liturgy":21176,"spectacle":21177,"sweeney":21178,"pont":21179,"routledge":21180,"##tment":21181,"cosmos":21182,"ut":21183,"hilt":21184,"sleek":21185,"universally":21186,"##eville":21187,"##gawa":21188,"typed":21189,"##dry":21190,"favors":21191,"allegheny":21192,"glaciers":21193,"##rly":21194,"recalling":21195,"aziz":21196,"##log":21197,"parasite":21198,"requiem":21199,"auf":21200,"##berto":21201,"##llin":21202,"illum
ination":21203,"##breaker":21204,"##issa":21205,"festivities":21206,"bows":21207,"govern":21208,"vibe":21209,"vp":21210,"333":21211,"sprawled":21212,"larson":21213,"pilgrim":21214,"bwf":21215,"leaping":21216,"##rts":21217,"##ssel":21218,"alexei":21219,"greyhound":21220,"hoarse":21221,"##dler":21222,"##oration":21223,"seneca":21224,"##cule":21225,"gaping":21226,"##ulously":21227,"##pura":21228,"cinnamon":21229,"##gens":21230,"##rricular":21231,"craven":21232,"fantasies":21233,"houghton":21234,"engined":21235,"reigned":21236,"dictator":21237,"supervising":21238,"##oris":21239,"bogota":21240,"commentaries":21241,"unnatural":21242,"fingernails":21243,"spirituality":21244,"tighten":21245,"##tm":21246,"canadiens":21247,"protesting":21248,"intentional":21249,"cheers":21250,"sparta":21251,"##ytic":21252,"##iere":21253,"##zine":21254,"widen":21255,"belgarath":21256,"controllers":21257,"dodd":21258,"iaaf":21259,"navarre":21260,"##ication":21261,"defect":21262,"squire":21263,"steiner":21264,"whisky":21265,"##mins":21266,"560":21267,"inevitably":21268,"tome":21269,"##gold":21270,"chew":21271,"##uid":21272,"##lid":21273,"elastic":21274,"##aby":21275,"streaked":21276,"alliances":21277,"jailed":21278,"regal":21279,"##ined":21280,"##phy":21281,"czechoslovak":21282,"narration":21283,"absently":21284,"##uld":21285,"bluegrass":21286,"guangdong":21287,"quran":21288,"criticizing":21289,"hose":21290,"hari":21291,"##liest":21292,"##owa":21293,"skier":21294,"streaks":21295,"deploy":21296,"##lom":21297,"raft":21298,"bose":21299,"dialed":21300,"huff":21301,"##eira":21302,"haifa":21303,"simplest":21304,"bursting":21305,"endings":21306,"ib":21307,"sultanate":21308,"##titled":21309,"franks":21310,"whitman":21311,"ensures":21312,"sven":21313,"##ggs":21314,"collaborators":21315,"forster":21316,"organising":21317,"ui":21318,"banished":21319,"napier":21320,"injustice":21321,"teller":21322,"layered":21323,"thump":21324,"##otti":21325,"roc":21326,"battleships":21327,"evidenced":21328,"fugitive":21329,"sadie":21330,"robotics":21331,"##roud":21332,"equatorial":21333,"geologist":21334,"##iza":21335,"yielding":21336,"##bron":21337,"##sr":21338,"internationale":21339,"mecca":21340,"##diment":21341,"sbs":21342,"skyline":21343,"toad":21344,"uploaded":21345,"reflective":21346,"undrafted":21347,"lal":21348,"leafs":21349,"bayern":21350,"##dai":21351,"lakshmi":21352,"shortlisted":21353,"##stick":21354,"##wicz":21355,"camouflage":21356,"donate":21357,"af":21358,"christi":21359,"lau":21360,"##acio":21361,"disclosed":21362,"nemesis":21363,"1761":21364,"assemble":21365,"straining":21366,"northamptonshire":21367,"tal":21368,"##asi":21369,"bernardino":21370,"premature":21371,"heidi":21372,"42nd":21373,"coefficients":21374,"galactic":21375,"reproduce":21376,"buzzed":21377,"sensations":21378,"zionist":21379,"monsieur":21380,"myrtle":21381,"##eme":21382,"archery":21383,"strangled":21384,"musically":21385,"viewpoint":21386,"antiquities":21387,"bei":21388,"trailers":21389,"seahawks":21390,"cured":21391,"pee":21392,"preferring":21393,"tasmanian":21394,"lange":21395,"sul":21396,"##mail":21397,"##working":21398,"colder":21399,"overland":21400,"lucivar":21401,"massey":21402,"gatherings":21403,"haitian":21404,"##smith":21405,"disapproval":21406,"flaws":21407,"##cco":21408,"##enbach":21409,"1766":21410,"npr":21411,"##icular":21412,"boroughs":21413,"creole":21414,"forums":21415,"techno":21416,"1755":21417,"dent":21418,"abdominal":21419,"streetcar":21420,"##eson":21421,"##stream":21422,"procurement":21423,"gemini":21424,"predictable":21425,"##tya":21426,
"acheron":21427,"christoph":21428,"feeder":21429,"fronts":21430,"vendor":21431,"bernhard":21432,"jammu":21433,"tumors":21434,"slang":21435,"##uber":21436,"goaltender":21437,"twists":21438,"curving":21439,"manson":21440,"vuelta":21441,"mer":21442,"peanut":21443,"confessions":21444,"pouch":21445,"unpredictable":21446,"allowance":21447,"theodor":21448,"vascular":21449,"##factory":21450,"bala":21451,"authenticity":21452,"metabolic":21453,"coughing":21454,"nanjing":21455,"##cea":21456,"pembroke":21457,"##bard":21458,"splendid":21459,"36th":21460,"ff":21461,"hourly":21462,"##ahu":21463,"elmer":21464,"handel":21465,"##ivate":21466,"awarding":21467,"thrusting":21468,"dl":21469,"experimentation":21470,"##hesion":21471,"##46":21472,"caressed":21473,"entertained":21474,"steak":21475,"##rangle":21476,"biologist":21477,"orphans":21478,"baroness":21479,"oyster":21480,"stepfather":21481,"##dridge":21482,"mirage":21483,"reefs":21484,"speeding":21485,"##31":21486,"barons":21487,"1764":21488,"227":21489,"inhabit":21490,"preached":21491,"repealed":21492,"##tral":21493,"honoring":21494,"boogie":21495,"captives":21496,"administer":21497,"johanna":21498,"##imate":21499,"gel":21500,"suspiciously":21501,"1767":21502,"sobs":21503,"##dington":21504,"backbone":21505,"hayward":21506,"garry":21507,"##folding":21508,"##nesia":21509,"maxi":21510,"##oof":21511,"##ppe":21512,"ellison":21513,"galileo":21514,"##stand":21515,"crimea":21516,"frenzy":21517,"amour":21518,"bumper":21519,"matrices":21520,"natalia":21521,"baking":21522,"garth":21523,"palestinians":21524,"##grove":21525,"smack":21526,"conveyed":21527,"ensembles":21528,"gardening":21529,"##manship":21530,"##rup":21531,"##stituting":21532,"1640":21533,"harvesting":21534,"topography":21535,"jing":21536,"shifters":21537,"dormitory":21538,"##carriage":21539,"##lston":21540,"ist":21541,"skulls":21542,"##stadt":21543,"dolores":21544,"jewellery":21545,"sarawak":21546,"##wai":21547,"##zier":21548,"fences":21549,"christy":21550,"confinement":21551,"tumbling":21552,"credibility":21553,"fir":21554,"stench":21555,"##bria":21556,"##plication":21557,"##nged":21558,"##sam":21559,"virtues":21560,"##belt":21561,"marjorie":21562,"pba":21563,"##eem":21564,"##made":21565,"celebrates":21566,"schooner":21567,"agitated":21568,"barley":21569,"fulfilling":21570,"anthropologist":21571,"##pro":21572,"restrict":21573,"novi":21574,"regulating":21575,"##nent":21576,"padres":21577,"##rani":21578,"##hesive":21579,"loyola":21580,"tabitha":21581,"milky":21582,"olson":21583,"proprietor":21584,"crambidae":21585,"guarantees":21586,"intercollegiate":21587,"ljubljana":21588,"hilda":21589,"##sko":21590,"ignorant":21591,"hooded":21592,"##lts":21593,"sardinia":21594,"##lidae":21595,"##vation":21596,"frontman":21597,"privileged":21598,"witchcraft":21599,"##gp":21600,"jammed":21601,"laude":21602,"poking":21603,"##than":21604,"bracket":21605,"amazement":21606,"yunnan":21607,"##erus":21608,"maharaja":21609,"linnaeus":21610,"264":21611,"commissioning":21612,"milano":21613,"peacefully":21614,"##logies":21615,"akira":21616,"rani":21617,"regulator":21618,"##36":21619,"grasses":21620,"##rance":21621,"luzon":21622,"crows":21623,"compiler":21624,"gretchen":21625,"seaman":21626,"edouard":21627,"tab":21628,"buccaneers":21629,"ellington":21630,"hamlets":21631,"whig":21632,"socialists":21633,"##anto":21634,"directorial":21635,"easton":21636,"mythological":21637,"##kr":21638,"##vary":21639,"rhineland":21640,"semantic":21641,"taut":21642,"dune":21643,"inventions":21644,"succeeds":21645,"##iter":21646,"replication":21647,"bra
nched":21648,"##pired":21649,"jul":21650,"prosecuted":21651,"kangaroo":21652,"penetrated":21653,"##avian":21654,"middlesbrough":21655,"doses":21656,"bleak":21657,"madam":21658,"predatory":21659,"relentless":21660,"##vili":21661,"reluctance":21662,"##vir":21663,"hailey":21664,"crore":21665,"silvery":21666,"1759":21667,"monstrous":21668,"swimmers":21669,"transmissions":21670,"hawthorn":21671,"informing":21672,"##eral":21673,"toilets":21674,"caracas":21675,"crouch":21676,"kb":21677,"##sett":21678,"295":21679,"cartel":21680,"hadley":21681,"##aling":21682,"alexia":21683,"yvonne":21684,"##biology":21685,"cinderella":21686,"eton":21687,"superb":21688,"blizzard":21689,"stabbing":21690,"industrialist":21691,"maximus":21692,"##gm":21693,"##orus":21694,"groves":21695,"maud":21696,"clade":21697,"oversized":21698,"comedic":21699,"##bella":21700,"rosen":21701,"nomadic":21702,"fulham":21703,"montane":21704,"beverages":21705,"galaxies":21706,"redundant":21707,"swarm":21708,"##rot":21709,"##folia":21710,"##llis":21711,"buckinghamshire":21712,"fen":21713,"bearings":21714,"bahadur":21715,"##rom":21716,"gilles":21717,"phased":21718,"dynamite":21719,"faber":21720,"benoit":21721,"vip":21722,"##ount":21723,"##wd":21724,"booking":21725,"fractured":21726,"tailored":21727,"anya":21728,"spices":21729,"westwood":21730,"cairns":21731,"auditions":21732,"inflammation":21733,"steamed":21734,"##rocity":21735,"##acion":21736,"##urne":21737,"skyla":21738,"thereof":21739,"watford":21740,"torment":21741,"archdeacon":21742,"transforms":21743,"lulu":21744,"demeanor":21745,"fucked":21746,"serge":21747,"##sor":21748,"mckenna":21749,"minas":21750,"entertainer":21751,"##icide":21752,"caress":21753,"originate":21754,"residue":21755,"##sty":21756,"1740":21757,"##ilised":21758,"##org":21759,"beech":21760,"##wana":21761,"subsidies":21762,"##ghton":21763,"emptied":21764,"gladstone":21765,"ru":21766,"firefighters":21767,"voodoo":21768,"##rcle":21769,"het":21770,"nightingale":21771,"tamara":21772,"edmond":21773,"ingredient":21774,"weaknesses":21775,"silhouette":21776,"285":21777,"compatibility":21778,"withdrawing":21779,"hampson":21780,"##mona":21781,"anguish":21782,"giggling":21783,"##mber":21784,"bookstore":21785,"##jiang":21786,"southernmost":21787,"tilting":21788,"##vance":21789,"bai":21790,"economical":21791,"rf":21792,"briefcase":21793,"dreadful":21794,"hinted":21795,"projections":21796,"shattering":21797,"totaling":21798,"##rogate":21799,"analogue":21800,"indicted":21801,"periodical":21802,"fullback":21803,"##dman":21804,"haynes":21805,"##tenberg":21806,"##ffs":21807,"##ishment":21808,"1745":21809,"thirst":21810,"stumble":21811,"penang":21812,"vigorous":21813,"##ddling":21814,"##kor":21815,"##lium":21816,"octave":21817,"##ove":21818,"##enstein":21819,"##inen":21820,"##ones":21821,"siberian":21822,"##uti":21823,"cbn":21824,"repeal":21825,"swaying":21826,"##vington":21827,"khalid":21828,"tanaka":21829,"unicorn":21830,"otago":21831,"plastered":21832,"lobe":21833,"riddle":21834,"##rella":21835,"perch":21836,"##ishing":21837,"croydon":21838,"filtered":21839,"graeme":21840,"tripoli":21841,"##ossa":21842,"crocodile":21843,"##chers":21844,"sufi":21845,"mined":21846,"##tung":21847,"inferno":21848,"lsu":21849,"##phi":21850,"swelled":21851,"utilizes":21852,"£2":21853,"cale":21854,"periodicals":21855,"styx":21856,"hike":21857,"informally":21858,"coop":21859,"lund":21860,"##tidae":21861,"ala":21862,"hen":21863,"qui":21864,"transformations":21865,"disposed":21866,"sheath":21867,"chickens":21868,"##cade":21869,"fitzroy":21870,"sas":21871,"silesia"
:21872,"unacceptable":21873,"odisha":21874,"1650":21875,"sabrina":21876,"pe":21877,"spokane":21878,"ratios":21879,"athena":21880,"massage":21881,"shen":21882,"dilemma":21883,"##drum":21884,"##riz":21885,"##hul":21886,"corona":21887,"doubtful":21888,"niall":21889,"##pha":21890,"##bino":21891,"fines":21892,"cite":21893,"acknowledging":21894,"bangor":21895,"ballard":21896,"bathurst":21897,"##resh":21898,"huron":21899,"mustered":21900,"alzheimer":21901,"garments":21902,"kinase":21903,"tyre":21904,"warship":21905,"##cp":21906,"flashback":21907,"pulmonary":21908,"braun":21909,"cheat":21910,"kamal":21911,"cyclists":21912,"constructions":21913,"grenades":21914,"ndp":21915,"traveller":21916,"excuses":21917,"stomped":21918,"signalling":21919,"trimmed":21920,"futsal":21921,"mosques":21922,"relevance":21923,"##wine":21924,"wta":21925,"##23":21926,"##vah":21927,"##lter":21928,"hoc":21929,"##riding":21930,"optimistic":21931,"##´s":21932,"deco":21933,"sim":21934,"interacting":21935,"rejecting":21936,"moniker":21937,"waterways":21938,"##ieri":21939,"##oku":21940,"mayors":21941,"gdansk":21942,"outnumbered":21943,"pearls":21944,"##ended":21945,"##hampton":21946,"fairs":21947,"totals":21948,"dominating":21949,"262":21950,"notions":21951,"stairway":21952,"compiling":21953,"pursed":21954,"commodities":21955,"grease":21956,"yeast":21957,"##jong":21958,"carthage":21959,"griffiths":21960,"residual":21961,"amc":21962,"contraction":21963,"laird":21964,"sapphire":21965,"##marine":21966,"##ivated":21967,"amalgamation":21968,"dissolve":21969,"inclination":21970,"lyle":21971,"packaged":21972,"altitudes":21973,"suez":21974,"canons":21975,"graded":21976,"lurched":21977,"narrowing":21978,"boasts":21979,"guise":21980,"wed":21981,"enrico":21982,"##ovsky":21983,"rower":21984,"scarred":21985,"bree":21986,"cub":21987,"iberian":21988,"protagonists":21989,"bargaining":21990,"proposing":21991,"trainers":21992,"voyages":21993,"vans":21994,"fishes":21995,"##aea":21996,"##ivist":21997,"##verance":21998,"encryption":21999,"artworks":22000,"kazan":22001,"sabre":22002,"cleopatra":22003,"hepburn":22004,"rotting":22005,"supremacy":22006,"mecklenburg":22007,"##brate":22008,"burrows":22009,"hazards":22010,"outgoing":22011,"flair":22012,"organizes":22013,"##ctions":22014,"scorpion":22015,"##usions":22016,"boo":22017,"234":22018,"chevalier":22019,"dunedin":22020,"slapping":22021,"##34":22022,"ineligible":22023,"pensions":22024,"##38":22025,"##omic":22026,"manufactures":22027,"emails":22028,"bismarck":22029,"238":22030,"weakening":22031,"blackish":22032,"ding":22033,"mcgee":22034,"quo":22035,"##rling":22036,"northernmost":22037,"xx":22038,"manpower":22039,"greed":22040,"sampson":22041,"clicking":22042,"##ange":22043,"##horpe":22044,"##inations":22045,"##roving":22046,"torre":22047,"##eptive":22048,"##moral":22049,"symbolism":22050,"38th":22051,"asshole":22052,"meritorious":22053,"outfits":22054,"splashed":22055,"biographies":22056,"sprung":22057,"astros":22058,"##tale":22059,"302":22060,"737":22061,"filly":22062,"raoul":22063,"nw":22064,"tokugawa":22065,"linden":22066,"clubhouse":22067,"##apa":22068,"tracts":22069,"romano":22070,"##pio":22071,"putin":22072,"tags":22073,"##note":22074,"chained":22075,"dickson":22076,"gunshot":22077,"moe":22078,"gunn":22079,"rashid":22080,"##tails":22081,"zipper":22082,"##bas":22083,"##nea":22084,"contrasted":22085,"##ply":22086,"##udes":22087,"plum":22088,"pharaoh":22089,"##pile":22090,"aw":22091,"comedies":22092,"ingrid":22093,"sandwiches":22094,"subdivisions":22095,"1100":22096,"mariana":22097,"nokia":22098,"
kamen":22099,"hz":22100,"delaney":22101,"veto":22102,"herring":22103,"##words":22104,"possessive":22105,"outlines":22106,"##roup":22107,"siemens":22108,"stairwell":22109,"rc":22110,"gallantry":22111,"messiah":22112,"palais":22113,"yells":22114,"233":22115,"zeppelin":22116,"##dm":22117,"bolivar":22118,"##cede":22119,"smackdown":22120,"mckinley":22121,"##mora":22122,"##yt":22123,"muted":22124,"geologic":22125,"finely":22126,"unitary":22127,"avatar":22128,"hamas":22129,"maynard":22130,"rees":22131,"bog":22132,"contrasting":22133,"##rut":22134,"liv":22135,"chico":22136,"disposition":22137,"pixel":22138,"##erate":22139,"becca":22140,"dmitry":22141,"yeshiva":22142,"narratives":22143,"##lva":22144,"##ulton":22145,"mercenary":22146,"sharpe":22147,"tempered":22148,"navigate":22149,"stealth":22150,"amassed":22151,"keynes":22152,"##lini":22153,"untouched":22154,"##rrie":22155,"havoc":22156,"lithium":22157,"##fighting":22158,"abyss":22159,"graf":22160,"southward":22161,"wolverine":22162,"balloons":22163,"implements":22164,"ngos":22165,"transitions":22166,"##icum":22167,"ambushed":22168,"concacaf":22169,"dormant":22170,"economists":22171,"##dim":22172,"costing":22173,"csi":22174,"rana":22175,"universite":22176,"boulders":22177,"verity":22178,"##llon":22179,"collin":22180,"mellon":22181,"misses":22182,"cypress":22183,"fluorescent":22184,"lifeless":22185,"spence":22186,"##ulla":22187,"crewe":22188,"shepard":22189,"pak":22190,"revelations":22191,"##م":22192,"jolly":22193,"gibbons":22194,"paw":22195,"##dro":22196,"##quel":22197,"freeing":22198,"##test":22199,"shack":22200,"fries":22201,"palatine":22202,"##51":22203,"##hiko":22204,"accompaniment":22205,"cruising":22206,"recycled":22207,"##aver":22208,"erwin":22209,"sorting":22210,"synthesizers":22211,"dyke":22212,"realities":22213,"sg":22214,"strides":22215,"enslaved":22216,"wetland":22217,"##ghan":22218,"competence":22219,"gunpowder":22220,"grassy":22221,"maroon":22222,"reactors":22223,"objection":22224,"##oms":22225,"carlson":22226,"gearbox":22227,"macintosh":22228,"radios":22229,"shelton":22230,"##sho":22231,"clergyman":22232,"prakash":22233,"254":22234,"mongols":22235,"trophies":22236,"oricon":22237,"228":22238,"stimuli":22239,"twenty20":22240,"cantonese":22241,"cortes":22242,"mirrored":22243,"##saurus":22244,"bhp":22245,"cristina":22246,"melancholy":22247,"##lating":22248,"enjoyable":22249,"nuevo":22250,"##wny":22251,"downfall":22252,"schumacher":22253,"##ind":22254,"banging":22255,"lausanne":22256,"rumbled":22257,"paramilitary":22258,"reflex":22259,"ax":22260,"amplitude":22261,"migratory":22262,"##gall":22263,"##ups":22264,"midi":22265,"barnard":22266,"lastly":22267,"sherry":22268,"##hp":22269,"##nall":22270,"keystone":22271,"##kra":22272,"carleton":22273,"slippery":22274,"##53":22275,"coloring":22276,"foe":22277,"socket":22278,"otter":22279,"##rgos":22280,"mats":22281,"##tose":22282,"consultants":22283,"bafta":22284,"bison":22285,"topping":22286,"##km":22287,"490":22288,"primal":22289,"abandonment":22290,"transplant":22291,"atoll":22292,"hideous":22293,"mort":22294,"pained":22295,"reproduced":22296,"tae":22297,"howling":22298,"##turn":22299,"unlawful":22300,"billionaire":22301,"hotter":22302,"poised":22303,"lansing":22304,"##chang":22305,"dinamo":22306,"retro":22307,"messing":22308,"nfc":22309,"domesday":22310,"##mina":22311,"blitz":22312,"timed":22313,"##athing":22314,"##kley":22315,"ascending":22316,"gesturing":22317,"##izations":22318,"signaled":22319,"tis":22320,"chinatown":22321,"mermaid":22322,"savanna":22323,"jameson":22324,"##aint":22325,"cata
lina":22326,"##pet":22327,"##hers":22328,"cochrane":22329,"cy":22330,"chatting":22331,"##kus":22332,"alerted":22333,"computation":22334,"mused":22335,"noelle":22336,"majestic":22337,"mohawk":22338,"campo":22339,"octagonal":22340,"##sant":22341,"##hend":22342,"241":22343,"aspiring":22344,"##mart":22345,"comprehend":22346,"iona":22347,"paralyzed":22348,"shimmering":22349,"swindon":22350,"rhone":22351,"##eley":22352,"reputed":22353,"configurations":22354,"pitchfork":22355,"agitation":22356,"francais":22357,"gillian":22358,"lipstick":22359,"##ilo":22360,"outsiders":22361,"pontifical":22362,"resisting":22363,"bitterness":22364,"sewer":22365,"rockies":22366,"##edd":22367,"##ucher":22368,"misleading":22369,"1756":22370,"exiting":22371,"galloway":22372,"##nging":22373,"risked":22374,"##heart":22375,"246":22376,"commemoration":22377,"schultz":22378,"##rka":22379,"integrating":22380,"##rsa":22381,"poses":22382,"shrieked":22383,"##weiler":22384,"guineas":22385,"gladys":22386,"jerking":22387,"owls":22388,"goldsmith":22389,"nightly":22390,"penetrating":22391,"##unced":22392,"lia":22393,"##33":22394,"ignited":22395,"betsy":22396,"##aring":22397,"##thorpe":22398,"follower":22399,"vigorously":22400,"##rave":22401,"coded":22402,"kiran":22403,"knit":22404,"zoology":22405,"tbilisi":22406,"##28":22407,"##bered":22408,"repository":22409,"govt":22410,"deciduous":22411,"dino":22412,"growling":22413,"##bba":22414,"enhancement":22415,"unleashed":22416,"chanting":22417,"pussy":22418,"biochemistry":22419,"##eric":22420,"kettle":22421,"repression":22422,"toxicity":22423,"nrhp":22424,"##arth":22425,"##kko":22426,"##bush":22427,"ernesto":22428,"commended":22429,"outspoken":22430,"242":22431,"mca":22432,"parchment":22433,"sms":22434,"kristen":22435,"##aton":22436,"bisexual":22437,"raked":22438,"glamour":22439,"navajo":22440,"a2":22441,"conditioned":22442,"showcased":22443,"##hma":22444,"spacious":22445,"youthful":22446,"##esa":22447,"usl":22448,"appliances":22449,"junta":22450,"brest":22451,"layne":22452,"conglomerate":22453,"enchanted":22454,"chao":22455,"loosened":22456,"picasso":22457,"circulating":22458,"inspect":22459,"montevideo":22460,"##centric":22461,"##kti":22462,"piazza":22463,"spurred":22464,"##aith":22465,"bari":22466,"freedoms":22467,"poultry":22468,"stamford":22469,"lieu":22470,"##ect":22471,"indigo":22472,"sarcastic":22473,"bahia":22474,"stump":22475,"attach":22476,"dvds":22477,"frankenstein":22478,"lille":22479,"approx":22480,"scriptures":22481,"pollen":22482,"##script":22483,"nmi":22484,"overseen":22485,"##ivism":22486,"tides":22487,"proponent":22488,"newmarket":22489,"inherit":22490,"milling":22491,"##erland":22492,"centralized":22493,"##rou":22494,"distributors":22495,"credentials":22496,"drawers":22497,"abbreviation":22498,"##lco":22499,"##xon":22500,"downing":22501,"uncomfortably":22502,"ripe":22503,"##oes":22504,"erase":22505,"franchises":22506,"##ever":22507,"populace":22508,"##bery":22509,"##khar":22510,"decomposition":22511,"pleas":22512,"##tet":22513,"daryl":22514,"sabah":22515,"##stle":22516,"##wide":22517,"fearless":22518,"genie":22519,"lesions":22520,"annette":22521,"##ogist":22522,"oboe":22523,"appendix":22524,"nair":22525,"dripped":22526,"petitioned":22527,"maclean":22528,"mosquito":22529,"parrot":22530,"rpg":22531,"hampered":22532,"1648":22533,"operatic":22534,"reservoirs":22535,"##tham":22536,"irrelevant":22537,"jolt":22538,"summarized":22539,"##fp":22540,"medallion":22541,"##taff":22542,"##−":22543,"clawed":22544,"harlow":22545,"narrower":22546,"goddard":22547,"marcia":22548,"bodied":22
549,"fremont":22550,"suarez":22551,"altering":22552,"tempest":22553,"mussolini":22554,"porn":22555,"##isms":22556,"sweetly":22557,"oversees":22558,"walkers":22559,"solitude":22560,"grimly":22561,"shrines":22562,"hk":22563,"ich":22564,"supervisors":22565,"hostess":22566,"dietrich":22567,"legitimacy":22568,"brushes":22569,"expressive":22570,"##yp":22571,"dissipated":22572,"##rse":22573,"localized":22574,"systemic":22575,"##nikov":22576,"gettysburg":22577,"##js":22578,"##uaries":22579,"dialogues":22580,"muttering":22581,"251":22582,"housekeeper":22583,"sicilian":22584,"discouraged":22585,"##frey":22586,"beamed":22587,"kaladin":22588,"halftime":22589,"kidnap":22590,"##amo":22591,"##llet":22592,"1754":22593,"synonymous":22594,"depleted":22595,"instituto":22596,"insulin":22597,"reprised":22598,"##opsis":22599,"clashed":22600,"##ctric":22601,"interrupting":22602,"radcliffe":22603,"insisting":22604,"medici":22605,"1715":22606,"ejected":22607,"playfully":22608,"turbulent":22609,"##47":22610,"starvation":22611,"##rini":22612,"shipment":22613,"rebellious":22614,"petersen":22615,"verification":22616,"merits":22617,"##rified":22618,"cakes":22619,"##charged":22620,"1757":22621,"milford":22622,"shortages":22623,"spying":22624,"fidelity":22625,"##aker":22626,"emitted":22627,"storylines":22628,"harvested":22629,"seismic":22630,"##iform":22631,"cheung":22632,"kilda":22633,"theoretically":22634,"barbie":22635,"lynx":22636,"##rgy":22637,"##tius":22638,"goblin":22639,"mata":22640,"poisonous":22641,"##nburg":22642,"reactive":22643,"residues":22644,"obedience":22645,"##евич":22646,"conjecture":22647,"##rac":22648,"401":22649,"hating":22650,"sixties":22651,"kicker":22652,"moaning":22653,"motown":22654,"##bha":22655,"emancipation":22656,"neoclassical":22657,"##hering":22658,"consoles":22659,"ebert":22660,"professorship":22661,"##tures":22662,"sustaining":22663,"assaults":22664,"obeyed":22665,"affluent":22666,"incurred":22667,"tornadoes":22668,"##eber":22669,"##zow":22670,"emphasizing":22671,"highlanders":22672,"cheated":22673,"helmets":22674,"##ctus":22675,"internship":22676,"terence":22677,"bony":22678,"executions":22679,"legislators":22680,"berries":22681,"peninsular":22682,"tinged":22683,"##aco":22684,"1689":22685,"amplifier":22686,"corvette":22687,"ribbons":22688,"lavish":22689,"pennant":22690,"##lander":22691,"worthless":22692,"##chfield":22693,"##forms":22694,"mariano":22695,"pyrenees":22696,"expenditures":22697,"##icides":22698,"chesterfield":22699,"mandir":22700,"tailor":22701,"39th":22702,"sergey":22703,"nestled":22704,"willed":22705,"aristocracy":22706,"devotees":22707,"goodnight":22708,"raaf":22709,"rumored":22710,"weaponry":22711,"remy":22712,"appropriations":22713,"harcourt":22714,"burr":22715,"riaa":22716,"##lence":22717,"limitation":22718,"unnoticed":22719,"guo":22720,"soaking":22721,"swamps":22722,"##tica":22723,"collapsing":22724,"tatiana":22725,"descriptive":22726,"brigham":22727,"psalm":22728,"##chment":22729,"maddox":22730,"##lization":22731,"patti":22732,"caliph":22733,"##aja":22734,"akron":22735,"injuring":22736,"serra":22737,"##ganj":22738,"basins":22739,"##sari":22740,"astonished":22741,"launcher":22742,"##church":22743,"hilary":22744,"wilkins":22745,"sewing":22746,"##sf":22747,"stinging":22748,"##fia":22749,"##ncia":22750,"underwood":22751,"startup":22752,"##ition":22753,"compilations":22754,"vibrations":22755,"embankment":22756,"jurist":22757,"##nity":22758,"bard":22759,"juventus":22760,"groundwater":22761,"kern":22762,"palaces":22763,"helium":22764,"boca":22765,"cramped":22766,"marissa":2
2767,"soto":22768,"##worm":22769,"jae":22770,"princely":22771,"##ggy":22772,"faso":22773,"bazaar":22774,"warmly":22775,"##voking":22776,"229":22777,"pairing":22778,"##lite":22779,"##grate":22780,"##nets":22781,"wien":22782,"freaked":22783,"ulysses":22784,"rebirth":22785,"##alia":22786,"##rent":22787,"mummy":22788,"guzman":22789,"jimenez":22790,"stilled":22791,"##nitz":22792,"trajectory":22793,"tha":22794,"woken":22795,"archival":22796,"professions":22797,"##pts":22798,"##pta":22799,"hilly":22800,"shadowy":22801,"shrink":22802,"##bolt":22803,"norwood":22804,"glued":22805,"migrate":22806,"stereotypes":22807,"devoid":22808,"##pheus":22809,"625":22810,"evacuate":22811,"horrors":22812,"infancy":22813,"gotham":22814,"knowles":22815,"optic":22816,"downloaded":22817,"sachs":22818,"kingsley":22819,"parramatta":22820,"darryl":22821,"mor":22822,"##onale":22823,"shady":22824,"commence":22825,"confesses":22826,"kan":22827,"##meter":22828,"##placed":22829,"marlborough":22830,"roundabout":22831,"regents":22832,"frigates":22833,"io":22834,"##imating":22835,"gothenburg":22836,"revoked":22837,"carvings":22838,"clockwise":22839,"convertible":22840,"intruder":22841,"##sche":22842,"banged":22843,"##ogo":22844,"vicky":22845,"bourgeois":22846,"##mony":22847,"dupont":22848,"footing":22849,"##gum":22850,"pd":22851,"##real":22852,"buckle":22853,"yun":22854,"penthouse":22855,"sane":22856,"720":22857,"serviced":22858,"stakeholders":22859,"neumann":22860,"bb":22861,"##eers":22862,"comb":22863,"##gam":22864,"catchment":22865,"pinning":22866,"rallies":22867,"typing":22868,"##elles":22869,"forefront":22870,"freiburg":22871,"sweetie":22872,"giacomo":22873,"widowed":22874,"goodwill":22875,"worshipped":22876,"aspirations":22877,"midday":22878,"##vat":22879,"fishery":22880,"##trick":22881,"bournemouth":22882,"turk":22883,"243":22884,"hearth":22885,"ethanol":22886,"guadalajara":22887,"murmurs":22888,"sl":22889,"##uge":22890,"afforded":22891,"scripted":22892,"##hta":22893,"wah":22894,"##jn":22895,"coroner":22896,"translucent":22897,"252":22898,"memorials":22899,"puck":22900,"progresses":22901,"clumsy":22902,"##race":22903,"315":22904,"candace":22905,"recounted":22906,"##27":22907,"##slin":22908,"##uve":22909,"filtering":22910,"##mac":22911,"howl":22912,"strata":22913,"heron":22914,"leveled":22915,"##ays":22916,"dubious":22917,"##oja":22918,"##т":22919,"##wheel":22920,"citations":22921,"exhibiting":22922,"##laya":22923,"##mics":22924,"##pods":22925,"turkic":22926,"##lberg":22927,"injunction":22928,"##ennial":22929,"##mit":22930,"antibodies":22931,"##44":22932,"organise":22933,"##rigues":22934,"cardiovascular":22935,"cushion":22936,"inverness":22937,"##zquez":22938,"dia":22939,"cocoa":22940,"sibling":22941,"##tman":22942,"##roid":22943,"expanse":22944,"feasible":22945,"tunisian":22946,"algiers":22947,"##relli":22948,"rus":22949,"bloomberg":22950,"dso":22951,"westphalia":22952,"bro":22953,"tacoma":22954,"281":22955,"downloads":22956,"##ours":22957,"konrad":22958,"duran":22959,"##hdi":22960,"continuum":22961,"jett":22962,"compares":22963,"legislator":22964,"secession":22965,"##nable":22966,"##gues":22967,"##zuka":22968,"translating":22969,"reacher":22970,"##gley":22971,"##ła":22972,"aleppo":22973,"##agi":22974,"tc":22975,"orchards":22976,"trapping":22977,"linguist":22978,"versatile":22979,"drumming":22980,"postage":22981,"calhoun":22982,"superiors":22983,"##mx":22984,"barefoot":22985,"leary":22986,"##cis":22987,"ignacio":22988,"alfa":22989,"kaplan":22990,"##rogen":22991,"bratislava":22992,"mori":22993,"##vot":22994,"disturb":22995,
"haas":22996,"313":22997,"cartridges":22998,"gilmore":22999,"radiated":23000,"salford":23001,"tunic":23002,"hades":23003,"##ulsive":23004,"archeological":23005,"delilah":23006,"magistrates":23007,"auditioned":23008,"brewster":23009,"charters":23010,"empowerment":23011,"blogs":23012,"cappella":23013,"dynasties":23014,"iroquois":23015,"whipping":23016,"##krishna":23017,"raceway":23018,"truths":23019,"myra":23020,"weaken":23021,"judah":23022,"mcgregor":23023,"##horse":23024,"mic":23025,"refueling":23026,"37th":23027,"burnley":23028,"bosses":23029,"markus":23030,"premio":23031,"query":23032,"##gga":23033,"dunbar":23034,"##economic":23035,"darkest":23036,"lyndon":23037,"sealing":23038,"commendation":23039,"reappeared":23040,"##mun":23041,"addicted":23042,"ezio":23043,"slaughtered":23044,"satisfactory":23045,"shuffle":23046,"##eves":23047,"##thic":23048,"##uj":23049,"fortification":23050,"warrington":23051,"##otto":23052,"resurrected":23053,"fargo":23054,"mane":23055,"##utable":23056,"##lei":23057,"##space":23058,"foreword":23059,"ox":23060,"##aris":23061,"##vern":23062,"abrams":23063,"hua":23064,"##mento":23065,"sakura":23066,"##alo":23067,"uv":23068,"sentimental":23069,"##skaya":23070,"midfield":23071,"##eses":23072,"sturdy":23073,"scrolls":23074,"macleod":23075,"##kyu":23076,"entropy":23077,"##lance":23078,"mitochondrial":23079,"cicero":23080,"excelled":23081,"thinner":23082,"convoys":23083,"perceive":23084,"##oslav":23085,"##urable":23086,"systematically":23087,"grind":23088,"burkina":23089,"287":23090,"##tagram":23091,"ops":23092,"##aman":23093,"guantanamo":23094,"##cloth":23095,"##tite":23096,"forcefully":23097,"wavy":23098,"##jou":23099,"pointless":23100,"##linger":23101,"##tze":23102,"layton":23103,"portico":23104,"superficial":23105,"clerical":23106,"outlaws":23107,"##hism":23108,"burials":23109,"muir":23110,"##inn":23111,"creditors":23112,"hauling":23113,"rattle":23114,"##leg":23115,"calais":23116,"monde":23117,"archers":23118,"reclaimed":23119,"dwell":23120,"wexford":23121,"hellenic":23122,"falsely":23123,"remorse":23124,"##tek":23125,"dough":23126,"furnishings":23127,"##uttered":23128,"gabon":23129,"neurological":23130,"novice":23131,"##igraphy":23132,"contemplated":23133,"pulpit":23134,"nightstand":23135,"saratoga":23136,"##istan":23137,"documenting":23138,"pulsing":23139,"taluk":23140,"##firmed":23141,"busted":23142,"marital":23143,"##rien":23144,"disagreements":23145,"wasps":23146,"##yes":23147,"hodge":23148,"mcdonnell":23149,"mimic":23150,"fran":23151,"pendant":23152,"dhabi":23153,"musa":23154,"##nington":23155,"congratulations":23156,"argent":23157,"darrell":23158,"concussion":23159,"losers":23160,"regrets":23161,"thessaloniki":23162,"reversal":23163,"donaldson":23164,"hardwood":23165,"thence":23166,"achilles":23167,"ritter":23168,"##eran":23169,"demonic":23170,"jurgen":23171,"prophets":23172,"goethe":23173,"eki":23174,"classmate":23175,"buff":23176,"##cking":23177,"yank":23178,"irrational":23179,"##inging":23180,"perished":23181,"seductive":23182,"qur":23183,"sourced":23184,"##crat":23185,"##typic":23186,"mustard":23187,"ravine":23188,"barre":23189,"horizontally":23190,"characterization":23191,"phylogenetic":23192,"boise":23193,"##dit":23194,"##runner":23195,"##tower":23196,"brutally":23197,"intercourse":23198,"seduce":23199,"##bbing":23200,"fay":23201,"ferris":23202,"ogden":23203,"amar":23204,"nik":23205,"unarmed":23206,"##inator":23207,"evaluating":23208,"kyrgyzstan":23209,"sweetness":23210,"##lford":23211,"##oki":23212,"mccormick":23213,"meiji":23214,"notoriety":23215,"stimul
ate":23216,"disrupt":23217,"figuring":23218,"instructional":23219,"mcgrath":23220,"##zoo":23221,"groundbreaking":23222,"##lto":23223,"flinch":23224,"khorasan":23225,"agrarian":23226,"bengals":23227,"mixer":23228,"radiating":23229,"##sov":23230,"ingram":23231,"pitchers":23232,"nad":23233,"tariff":23234,"##cript":23235,"tata":23236,"##codes":23237,"##emi":23238,"##ungen":23239,"appellate":23240,"lehigh":23241,"##bled":23242,"##giri":23243,"brawl":23244,"duct":23245,"texans":23246,"##ciation":23247,"##ropolis":23248,"skipper":23249,"speculative":23250,"vomit":23251,"doctrines":23252,"stresses":23253,"253":23254,"davy":23255,"graders":23256,"whitehead":23257,"jozef":23258,"timely":23259,"cumulative":23260,"haryana":23261,"paints":23262,"appropriately":23263,"boon":23264,"cactus":23265,"##ales":23266,"##pid":23267,"dow":23268,"legions":23269,"##pit":23270,"perceptions":23271,"1730":23272,"picturesque":23273,"##yse":23274,"periphery":23275,"rune":23276,"wr":23277,"##aha":23278,"celtics":23279,"sentencing":23280,"whoa":23281,"##erin":23282,"confirms":23283,"variance":23284,"425":23285,"moines":23286,"mathews":23287,"spade":23288,"rave":23289,"m1":23290,"fronted":23291,"fx":23292,"blending":23293,"alleging":23294,"reared":23295,"##gl":23296,"237":23297,"##paper":23298,"grassroots":23299,"eroded":23300,"##free":23301,"##physical":23302,"directs":23303,"ordeal":23304,"##sław":23305,"accelerate":23306,"hacker":23307,"rooftop":23308,"##inia":23309,"lev":23310,"buys":23311,"cebu":23312,"devote":23313,"##lce":23314,"specialising":23315,"##ulsion":23316,"choreographed":23317,"repetition":23318,"warehouses":23319,"##ryl":23320,"paisley":23321,"tuscany":23322,"analogy":23323,"sorcerer":23324,"hash":23325,"huts":23326,"shards":23327,"descends":23328,"exclude":23329,"nix":23330,"chaplin":23331,"gaga":23332,"ito":23333,"vane":23334,"##drich":23335,"causeway":23336,"misconduct":23337,"limo":23338,"orchestrated":23339,"glands":23340,"jana":23341,"##kot":23342,"u2":23343,"##mple":23344,"##sons":23345,"branching":23346,"contrasts":23347,"scoop":23348,"longed":23349,"##virus":23350,"chattanooga":23351,"##75":23352,"syrup":23353,"cornerstone":23354,"##tized":23355,"##mind":23356,"##iaceae":23357,"careless":23358,"precedence":23359,"frescoes":23360,"##uet":23361,"chilled":23362,"consult":23363,"modelled":23364,"snatch":23365,"peat":23366,"##thermal":23367,"caucasian":23368,"humane":23369,"relaxation":23370,"spins":23371,"temperance":23372,"##lbert":23373,"occupations":23374,"lambda":23375,"hybrids":23376,"moons":23377,"mp3":23378,"##oese":23379,"247":23380,"rolf":23381,"societal":23382,"yerevan":23383,"ness":23384,"##ssler":23385,"befriended":23386,"mechanized":23387,"nominate":23388,"trough":23389,"boasted":23390,"cues":23391,"seater":23392,"##hom":23393,"bends":23394,"##tangle":23395,"conductors":23396,"emptiness":23397,"##lmer":23398,"eurasian":23399,"adriatic":23400,"tian":23401,"##cie":23402,"anxiously":23403,"lark":23404,"propellers":23405,"chichester":23406,"jock":23407,"ev":23408,"2a":23409,"##holding":23410,"credible":23411,"recounts":23412,"tori":23413,"loyalist":23414,"abduction":23415,"##hoot":23416,"##redo":23417,"nepali":23418,"##mite":23419,"ventral":23420,"tempting":23421,"##ango":23422,"##crats":23423,"steered":23424,"##wice":23425,"javelin":23426,"dipping":23427,"laborers":23428,"prentice":23429,"looming":23430,"titanium":23431,"##ː":23432,"badges":23433,"emir":23434,"tensor":23435,"##ntation":23436,"egyptians":23437,"rash":23438,"denies":23439,"hawthorne":23440,"lombard":23441,"showers":23442,"wehr
macht":23443,"dietary":23444,"trojan":23445,"##reus":23446,"welles":23447,"executing":23448,"horseshoe":23449,"lifeboat":23450,"##lak":23451,"elsa":23452,"infirmary":23453,"nearing":23454,"roberta":23455,"boyer":23456,"mutter":23457,"trillion":23458,"joanne":23459,"##fine":23460,"##oked":23461,"sinks":23462,"vortex":23463,"uruguayan":23464,"clasp":23465,"sirius":23466,"##block":23467,"accelerator":23468,"prohibit":23469,"sunken":23470,"byu":23471,"chronological":23472,"diplomats":23473,"ochreous":23474,"510":23475,"symmetrical":23476,"1644":23477,"maia":23478,"##tology":23479,"salts":23480,"reigns":23481,"atrocities":23482,"##ия":23483,"hess":23484,"bared":23485,"issn":23486,"##vyn":23487,"cater":23488,"saturated":23489,"##cycle":23490,"##isse":23491,"sable":23492,"voyager":23493,"dyer":23494,"yusuf":23495,"##inge":23496,"fountains":23497,"wolff":23498,"##39":23499,"##nni":23500,"engraving":23501,"rollins":23502,"atheist":23503,"ominous":23504,"##ault":23505,"herr":23506,"chariot":23507,"martina":23508,"strung":23509,"##fell":23510,"##farlane":23511,"horrific":23512,"sahib":23513,"gazes":23514,"saetan":23515,"erased":23516,"ptolemy":23517,"##olic":23518,"flushing":23519,"lauderdale":23520,"analytic":23521,"##ices":23522,"530":23523,"navarro":23524,"beak":23525,"gorilla":23526,"herrera":23527,"broom":23528,"guadalupe":23529,"raiding":23530,"sykes":23531,"311":23532,"bsc":23533,"deliveries":23534,"1720":23535,"invasions":23536,"carmichael":23537,"tajikistan":23538,"thematic":23539,"ecumenical":23540,"sentiments":23541,"onstage":23542,"##rians":23543,"##brand":23544,"##sume":23545,"catastrophic":23546,"flanks":23547,"molten":23548,"##arns":23549,"waller":23550,"aimee":23551,"terminating":23552,"##icing":23553,"alternately":23554,"##oche":23555,"nehru":23556,"printers":23557,"outraged":23558,"##eving":23559,"empires":23560,"template":23561,"banners":23562,"repetitive":23563,"za":23564,"##oise":23565,"vegetarian":23566,"##tell":23567,"guiana":23568,"opt":23569,"cavendish":23570,"lucknow":23571,"synthesized":23572,"##hani":23573,"##mada":23574,"finalized":23575,"##ctable":23576,"fictitious":23577,"mayoral":23578,"unreliable":23579,"##enham":23580,"embracing":23581,"peppers":23582,"rbis":23583,"##chio":23584,"##neo":23585,"inhibition":23586,"slashed":23587,"togo":23588,"orderly":23589,"embroidered":23590,"safari":23591,"salty":23592,"236":23593,"barron":23594,"benito":23595,"totaled":23596,"##dak":23597,"pubs":23598,"simulated":23599,"caden":23600,"devin":23601,"tolkien":23602,"momma":23603,"welding":23604,"sesame":23605,"##ept":23606,"gottingen":23607,"hardness":23608,"630":23609,"shaman":23610,"temeraire":23611,"620":23612,"adequately":23613,"pediatric":23614,"##kit":23615,"ck":23616,"assertion":23617,"radicals":23618,"composure":23619,"cadence":23620,"seafood":23621,"beaufort":23622,"lazarus":23623,"mani":23624,"warily":23625,"cunning":23626,"kurdistan":23627,"249":23628,"cantata":23629,"##kir":23630,"ares":23631,"##41":23632,"##clusive":23633,"nape":23634,"townland":23635,"geared":23636,"insulted":23637,"flutter":23638,"boating":23639,"violate":23640,"draper":23641,"dumping":23642,"malmo":23643,"##hh":23644,"##romatic":23645,"firearm":23646,"alta":23647,"bono":23648,"obscured":23649,"##clave":23650,"exceeds":23651,"panorama":23652,"unbelievable":23653,"##train":23654,"preschool":23655,"##essed":23656,"disconnected":23657,"installing":23658,"rescuing":23659,"secretaries":23660,"accessibility":23661,"##castle":23662,"##drive":23663,"##ifice":23664,"##film":23665,"bouts":23666,"slug":23667,"water
way":23668,"mindanao":23669,"##buro":23670,"##ratic":23671,"halves":23672,"##ل":23673,"calming":23674,"liter":23675,"maternity":23676,"adorable":23677,"bragg":23678,"electrification":23679,"mcc":23680,"##dote":23681,"roxy":23682,"schizophrenia":23683,"##body":23684,"munoz":23685,"kaye":23686,"whaling":23687,"239":23688,"mil":23689,"tingling":23690,"tolerant":23691,"##ago":23692,"unconventional":23693,"volcanoes":23694,"##finder":23695,"deportivo":23696,"##llie":23697,"robson":23698,"kaufman":23699,"neuroscience":23700,"wai":23701,"deportation":23702,"masovian":23703,"scraping":23704,"converse":23705,"##bh":23706,"hacking":23707,"bulge":23708,"##oun":23709,"administratively":23710,"yao":23711,"580":23712,"amp":23713,"mammoth":23714,"booster":23715,"claremont":23716,"hooper":23717,"nomenclature":23718,"pursuits":23719,"mclaughlin":23720,"melinda":23721,"##sul":23722,"catfish":23723,"barclay":23724,"substrates":23725,"taxa":23726,"zee":23727,"originals":23728,"kimberly":23729,"packets":23730,"padma":23731,"##ality":23732,"borrowing":23733,"ostensibly":23734,"solvent":23735,"##bri":23736,"##genesis":23737,"##mist":23738,"lukas":23739,"shreveport":23740,"veracruz":23741,"##ь":23742,"##lou":23743,"##wives":23744,"cheney":23745,"tt":23746,"anatolia":23747,"hobbs":23748,"##zyn":23749,"cyclic":23750,"radiant":23751,"alistair":23752,"greenish":23753,"siena":23754,"dat":23755,"independents":23756,"##bation":23757,"conform":23758,"pieter":23759,"hyper":23760,"applicant":23761,"bradshaw":23762,"spores":23763,"telangana":23764,"vinci":23765,"inexpensive":23766,"nuclei":23767,"322":23768,"jang":23769,"nme":23770,"soho":23771,"spd":23772,"##ign":23773,"cradled":23774,"receptionist":23775,"pow":23776,"##43":23777,"##rika":23778,"fascism":23779,"##ifer":23780,"experimenting":23781,"##ading":23782,"##iec":23783,"##region":23784,"345":23785,"jocelyn":23786,"maris":23787,"stair":23788,"nocturnal":23789,"toro":23790,"constabulary":23791,"elgin":23792,"##kker":23793,"msc":23794,"##giving":23795,"##schen":23796,"##rase":23797,"doherty":23798,"doping":23799,"sarcastically":23800,"batter":23801,"maneuvers":23802,"##cano":23803,"##apple":23804,"##gai":23805,"##git":23806,"intrinsic":23807,"##nst":23808,"##stor":23809,"1753":23810,"showtime":23811,"cafes":23812,"gasps":23813,"lviv":23814,"ushered":23815,"##thed":23816,"fours":23817,"restart":23818,"astonishment":23819,"transmitting":23820,"flyer":23821,"shrugs":23822,"##sau":23823,"intriguing":23824,"cones":23825,"dictated":23826,"mushrooms":23827,"medial":23828,"##kovsky":23829,"##elman":23830,"escorting":23831,"gaped":23832,"##26":23833,"godfather":23834,"##door":23835,"##sell":23836,"djs":23837,"recaptured":23838,"timetable":23839,"vila":23840,"1710":23841,"3a":23842,"aerodrome":23843,"mortals":23844,"scientology":23845,"##orne":23846,"angelina":23847,"mag":23848,"convection":23849,"unpaid":23850,"insertion":23851,"intermittent":23852,"lego":23853,"##nated":23854,"endeavor":23855,"kota":23856,"pereira":23857,"##lz":23858,"304":23859,"bwv":23860,"glamorgan":23861,"insults":23862,"agatha":23863,"fey":23864,"##cend":23865,"fleetwood":23866,"mahogany":23867,"protruding":23868,"steamship":23869,"zeta":23870,"##arty":23871,"mcguire":23872,"suspense":23873,"##sphere":23874,"advising":23875,"urges":23876,"##wala":23877,"hurriedly":23878,"meteor":23879,"gilded":23880,"inline":23881,"arroyo":23882,"stalker":23883,"##oge":23884,"excitedly":23885,"revered":23886,"##cure":23887,"earle":23888,"introductory":23889,"##break":23890,"##ilde":23891,"mutants":23892,"puff":23893,"pulse
s":23894,"reinforcement":23895,"##haling":23896,"curses":23897,"lizards":23898,"stalk":23899,"correlated":23900,"##fixed":23901,"fallout":23902,"macquarie":23903,"##unas":23904,"bearded":23905,"denton":23906,"heaving":23907,"802":23908,"##ocation":23909,"winery":23910,"assign":23911,"dortmund":23912,"##lkirk":23913,"everest":23914,"invariant":23915,"charismatic":23916,"susie":23917,"##elling":23918,"bled":23919,"lesley":23920,"telegram":23921,"sumner":23922,"bk":23923,"##ogen":23924,"##к":23925,"wilcox":23926,"needy":23927,"colbert":23928,"duval":23929,"##iferous":23930,"##mbled":23931,"allotted":23932,"attends":23933,"imperative":23934,"##hita":23935,"replacements":23936,"hawker":23937,"##inda":23938,"insurgency":23939,"##zee":23940,"##eke":23941,"casts":23942,"##yla":23943,"680":23944,"ives":23945,"transitioned":23946,"##pack":23947,"##powering":23948,"authoritative":23949,"baylor":23950,"flex":23951,"cringed":23952,"plaintiffs":23953,"woodrow":23954,"##skie":23955,"drastic":23956,"ape":23957,"aroma":23958,"unfolded":23959,"commotion":23960,"nt":23961,"preoccupied":23962,"theta":23963,"routines":23964,"lasers":23965,"privatization":23966,"wand":23967,"domino":23968,"ek":23969,"clenching":23970,"nsa":23971,"strategically":23972,"showered":23973,"bile":23974,"handkerchief":23975,"pere":23976,"storing":23977,"christophe":23978,"insulting":23979,"316":23980,"nakamura":23981,"romani":23982,"asiatic":23983,"magdalena":23984,"palma":23985,"cruises":23986,"stripping":23987,"405":23988,"konstantin":23989,"soaring":23990,"##berman":23991,"colloquially":23992,"forerunner":23993,"havilland":23994,"incarcerated":23995,"parasites":23996,"sincerity":23997,"##utus":23998,"disks":23999,"plank":24000,"saigon":24001,"##ining":24002,"corbin":24003,"homo":24004,"ornaments":24005,"powerhouse":24006,"##tlement":24007,"chong":24008,"fastened":24009,"feasibility":24010,"idf":24011,"morphological":24012,"usable":24013,"##nish":24014,"##zuki":24015,"aqueduct":24016,"jaguars":24017,"keepers":24018,"##flies":24019,"aleksandr":24020,"faust":24021,"assigns":24022,"ewing":24023,"bacterium":24024,"hurled":24025,"tricky":24026,"hungarians":24027,"integers":24028,"wallis":24029,"321":24030,"yamaha":24031,"##isha":24032,"hushed":24033,"oblivion":24034,"aviator":24035,"evangelist":24036,"friars":24037,"##eller":24038,"monograph":24039,"ode":24040,"##nary":24041,"airplanes":24042,"labourers":24043,"charms":24044,"##nee":24045,"1661":24046,"hagen":24047,"tnt":24048,"rudder":24049,"fiesta":24050,"transcript":24051,"dorothea":24052,"ska":24053,"inhibitor":24054,"maccabi":24055,"retorted":24056,"raining":24057,"encompassed":24058,"clauses":24059,"menacing":24060,"1642":24061,"lineman":24062,"##gist":24063,"vamps":24064,"##ape":24065,"##dick":24066,"gloom":24067,"##rera":24068,"dealings":24069,"easing":24070,"seekers":24071,"##nut":24072,"##pment":24073,"helens":24074,"unmanned":24075,"##anu":24076,"##isson":24077,"basics":24078,"##amy":24079,"##ckman":24080,"adjustments":24081,"1688":24082,"brutality":24083,"horne":24084,"##zell":24085,"sui":24086,"##55":24087,"##mable":24088,"aggregator":24089,"##thal":24090,"rhino":24091,"##drick":24092,"##vira":24093,"counters":24094,"zoom":24095,"##01":24096,"##rting":24097,"mn":24098,"montenegrin":24099,"packard":24100,"##unciation":24101,"##♭":24102,"##kki":24103,"reclaim":24104,"scholastic":24105,"thugs":24106,"pulsed":24107,"##icia":24108,"syriac":24109,"quan":24110,"saddam":24111,"banda":24112,"kobe":24113,"blaming":24114,"buddies":24115,"dissent":24116,"##lusion":24117,"##usia":24118,"c
orbett":24119,"jaya":24120,"delle":24121,"erratic":24122,"lexie":24123,"##hesis":24124,"435":24125,"amiga":24126,"hermes":24127,"##pressing":24128,"##leen":24129,"chapels":24130,"gospels":24131,"jamal":24132,"##uating":24133,"compute":24134,"revolving":24135,"warp":24136,"##sso":24137,"##thes":24138,"armory":24139,"##eras":24140,"##gol":24141,"antrim":24142,"loki":24143,"##kow":24144,"##asian":24145,"##good":24146,"##zano":24147,"braid":24148,"handwriting":24149,"subdistrict":24150,"funky":24151,"pantheon":24152,"##iculate":24153,"concurrency":24154,"estimation":24155,"improper":24156,"juliana":24157,"##his":24158,"newcomers":24159,"johnstone":24160,"staten":24161,"communicated":24162,"##oco":24163,"##alle":24164,"sausage":24165,"stormy":24166,"##stered":24167,"##tters":24168,"superfamily":24169,"##grade":24170,"acidic":24171,"collateral":24172,"tabloid":24173,"##oped":24174,"##rza":24175,"bladder":24176,"austen":24177,"##ellant":24178,"mcgraw":24179,"##hay":24180,"hannibal":24181,"mein":24182,"aquino":24183,"lucifer":24184,"wo":24185,"badger":24186,"boar":24187,"cher":24188,"christensen":24189,"greenberg":24190,"interruption":24191,"##kken":24192,"jem":24193,"244":24194,"mocked":24195,"bottoms":24196,"cambridgeshire":24197,"##lide":24198,"sprawling":24199,"##bbly":24200,"eastwood":24201,"ghent":24202,"synth":24203,"##buck":24204,"advisers":24205,"##bah":24206,"nominally":24207,"hapoel":24208,"qu":24209,"daggers":24210,"estranged":24211,"fabricated":24212,"towels":24213,"vinnie":24214,"wcw":24215,"misunderstanding":24216,"anglia":24217,"nothin":24218,"unmistakable":24219,"##dust":24220,"##lova":24221,"chilly":24222,"marquette":24223,"truss":24224,"##edge":24225,"##erine":24226,"reece":24227,"##lty":24228,"##chemist":24229,"##connected":24230,"272":24231,"308":24232,"41st":24233,"bash":24234,"raion":24235,"waterfalls":24236,"##ump":24237,"##main":24238,"labyrinth":24239,"queue":24240,"theorist":24241,"##istle":24242,"bharatiya":24243,"flexed":24244,"soundtracks":24245,"rooney":24246,"leftist":24247,"patrolling":24248,"wharton":24249,"plainly":24250,"alleviate":24251,"eastman":24252,"schuster":24253,"topographic":24254,"engages":24255,"immensely":24256,"unbearable":24257,"fairchild":24258,"1620":24259,"dona":24260,"lurking":24261,"parisian":24262,"oliveira":24263,"ia":24264,"indictment":24265,"hahn":24266,"bangladeshi":24267,"##aster":24268,"vivo":24269,"##uming":24270,"##ential":24271,"antonia":24272,"expects":24273,"indoors":24274,"kildare":24275,"harlan":24276,"##logue":24277,"##ogenic":24278,"##sities":24279,"forgiven":24280,"##wat":24281,"childish":24282,"tavi":24283,"##mide":24284,"##orra":24285,"plausible":24286,"grimm":24287,"successively":24288,"scooted":24289,"##bola":24290,"##dget":24291,"##rith":24292,"spartans":24293,"emery":24294,"flatly":24295,"azure":24296,"epilogue":24297,"##wark":24298,"flourish":24299,"##iny":24300,"##tracted":24301,"##overs":24302,"##oshi":24303,"bestseller":24304,"distressed":24305,"receipt":24306,"spitting":24307,"hermit":24308,"topological":24309,"##cot":24310,"drilled":24311,"subunit":24312,"francs":24313,"##layer":24314,"eel":24315,"##fk":24316,"##itas":24317,"octopus":24318,"footprint":24319,"petitions":24320,"ufo":24321,"##say":24322,"##foil":24323,"interfering":24324,"leaking":24325,"palo":24326,"##metry":24327,"thistle":24328,"valiant":24329,"##pic":24330,"narayan":24331,"mcpherson":24332,"##fast":24333,"gonzales":24334,"##ym":24335,"##enne":24336,"dustin":24337,"novgorod":24338,"solos":24339,"##zman":24340,"doin":24341,"##raph":24342,"##patient":
24343,"##meyer":24344,"soluble":24345,"ashland":24346,"cuffs":24347,"carole":24348,"pendleton":24349,"whistling":24350,"vassal":24351,"##river":24352,"deviation":24353,"revisited":24354,"constituents":24355,"rallied":24356,"rotate":24357,"loomed":24358,"##eil":24359,"##nting":24360,"amateurs":24361,"augsburg":24362,"auschwitz":24363,"crowns":24364,"skeletons":24365,"##cona":24366,"bonnet":24367,"257":24368,"dummy":24369,"globalization":24370,"simeon":24371,"sleeper":24372,"mandal":24373,"differentiated":24374,"##crow":24375,"##mare":24376,"milne":24377,"bundled":24378,"exasperated":24379,"talmud":24380,"owes":24381,"segregated":24382,"##feng":24383,"##uary":24384,"dentist":24385,"piracy":24386,"props":24387,"##rang":24388,"devlin":24389,"##torium":24390,"malicious":24391,"paws":24392,"##laid":24393,"dependency":24394,"##ergy":24395,"##fers":24396,"##enna":24397,"258":24398,"pistons":24399,"rourke":24400,"jed":24401,"grammatical":24402,"tres":24403,"maha":24404,"wig":24405,"512":24406,"ghostly":24407,"jayne":24408,"##achal":24409,"##creen":24410,"##ilis":24411,"##lins":24412,"##rence":24413,"designate":24414,"##with":24415,"arrogance":24416,"cambodian":24417,"clones":24418,"showdown":24419,"throttle":24420,"twain":24421,"##ception":24422,"lobes":24423,"metz":24424,"nagoya":24425,"335":24426,"braking":24427,"##furt":24428,"385":24429,"roaming":24430,"##minster":24431,"amin":24432,"crippled":24433,"##37":24434,"##llary":24435,"indifferent":24436,"hoffmann":24437,"idols":24438,"intimidating":24439,"1751":24440,"261":24441,"influenza":24442,"memo":24443,"onions":24444,"1748":24445,"bandage":24446,"consciously":24447,"##landa":24448,"##rage":24449,"clandestine":24450,"observes":24451,"swiped":24452,"tangle":24453,"##ener":24454,"##jected":24455,"##trum":24456,"##bill":24457,"##lta":24458,"hugs":24459,"congresses":24460,"josiah":24461,"spirited":24462,"##dek":24463,"humanist":24464,"managerial":24465,"filmmaking":24466,"inmate":24467,"rhymes":24468,"debuting":24469,"grimsby":24470,"ur":24471,"##laze":24472,"duplicate":24473,"vigor":24474,"##tf":24475,"republished":24476,"bolshevik":24477,"refurbishment":24478,"antibiotics":24479,"martini":24480,"methane":24481,"newscasts":24482,"royale":24483,"horizons":24484,"levant":24485,"iain":24486,"visas":24487,"##ischen":24488,"paler":24489,"##around":24490,"manifestation":24491,"snuck":24492,"alf":24493,"chop":24494,"futile":24495,"pedestal":24496,"rehab":24497,"##kat":24498,"bmg":24499,"kerman":24500,"res":24501,"fairbanks":24502,"jarrett":24503,"abstraction":24504,"saharan":24505,"##zek":24506,"1746":24507,"procedural":24508,"clearer":24509,"kincaid":24510,"sash":24511,"luciano":24512,"##ffey":24513,"crunch":24514,"helmut":24515,"##vara":24516,"revolutionaries":24517,"##tute":24518,"creamy":24519,"leach":24520,"##mmon":24521,"1747":24522,"permitting":24523,"nes":24524,"plight":24525,"wendell":24526,"##lese":24527,"contra":24528,"ts":24529,"clancy":24530,"ipa":24531,"mach":24532,"staples":24533,"autopsy":24534,"disturbances":24535,"nueva":24536,"karin":24537,"pontiac":24538,"##uding":24539,"proxy":24540,"venerable":24541,"haunt":24542,"leto":24543,"bergman":24544,"expands":24545,"##helm":24546,"wal":24547,"##pipe":24548,"canning":24549,"celine":24550,"cords":24551,"obesity":24552,"##enary":24553,"intrusion":24554,"planner":24555,"##phate":24556,"reasoned":24557,"sequencing":24558,"307":24559,"harrow":24560,"##chon":24561,"##dora":24562,"marred":24563,"mcintyre":24564,"repay":24565,"tarzan":24566,"darting":24567,"248":24568,"harrisburg":24569,"margarita":2
4570,"repulsed":24571,"##hur":24572,"##lding":24573,"belinda":24574,"hamburger":24575,"novo":24576,"compliant":24577,"runways":24578,"bingham":24579,"registrar":24580,"skyscraper":24581,"ic":24582,"cuthbert":24583,"improvisation":24584,"livelihood":24585,"##corp":24586,"##elial":24587,"admiring":24588,"##dened":24589,"sporadic":24590,"believer":24591,"casablanca":24592,"popcorn":24593,"##29":24594,"asha":24595,"shovel":24596,"##bek":24597,"##dice":24598,"coiled":24599,"tangible":24600,"##dez":24601,"casper":24602,"elsie":24603,"resin":24604,"tenderness":24605,"rectory":24606,"##ivision":24607,"avail":24608,"sonar":24609,"##mori":24610,"boutique":24611,"##dier":24612,"guerre":24613,"bathed":24614,"upbringing":24615,"vaulted":24616,"sandals":24617,"blessings":24618,"##naut":24619,"##utnant":24620,"1680":24621,"306":24622,"foxes":24623,"pia":24624,"corrosion":24625,"hesitantly":24626,"confederates":24627,"crystalline":24628,"footprints":24629,"shapiro":24630,"tirana":24631,"valentin":24632,"drones":24633,"45th":24634,"microscope":24635,"shipments":24636,"texted":24637,"inquisition":24638,"wry":24639,"guernsey":24640,"unauthorized":24641,"resigning":24642,"760":24643,"ripple":24644,"schubert":24645,"stu":24646,"reassure":24647,"felony":24648,"##ardo":24649,"brittle":24650,"koreans":24651,"##havan":24652,"##ives":24653,"dun":24654,"implicit":24655,"tyres":24656,"##aldi":24657,"##lth":24658,"magnolia":24659,"##ehan":24660,"##puri":24661,"##poulos":24662,"aggressively":24663,"fei":24664,"gr":24665,"familiarity":24666,"##poo":24667,"indicative":24668,"##trust":24669,"fundamentally":24670,"jimmie":24671,"overrun":24672,"395":24673,"anchors":24674,"moans":24675,"##opus":24676,"britannia":24677,"armagh":24678,"##ggle":24679,"purposely":24680,"seizing":24681,"##vao":24682,"bewildered":24683,"mundane":24684,"avoidance":24685,"cosmopolitan":24686,"geometridae":24687,"quartermaster":24688,"caf":24689,"415":24690,"chatter":24691,"engulfed":24692,"gleam":24693,"purge":24694,"##icate":24695,"juliette":24696,"jurisprudence":24697,"guerra":24698,"revisions":24699,"##bn":24700,"casimir":24701,"brew":24702,"##jm":24703,"1749":24704,"clapton":24705,"cloudy":24706,"conde":24707,"hermitage":24708,"278":24709,"simulations":24710,"torches":24711,"vincenzo":24712,"matteo":24713,"##rill":24714,"hidalgo":24715,"booming":24716,"westbound":24717,"accomplishment":24718,"tentacles":24719,"unaffected":24720,"##sius":24721,"annabelle":24722,"flopped":24723,"sloping":24724,"##litz":24725,"dreamer":24726,"interceptor":24727,"vu":24728,"##loh":24729,"consecration":24730,"copying":24731,"messaging":24732,"breaker":24733,"climates":24734,"hospitalized":24735,"1752":24736,"torino":24737,"afternoons":24738,"winfield":24739,"witnessing":24740,"##teacher":24741,"breakers":24742,"choirs":24743,"sawmill":24744,"coldly":24745,"##ege":24746,"sipping":24747,"haste":24748,"uninhabited":24749,"conical":24750,"bibliography":24751,"pamphlets":24752,"severn":24753,"edict":24754,"##oca":24755,"deux":24756,"illnesses":24757,"grips":24758,"##pl":24759,"rehearsals":24760,"sis":24761,"thinkers":24762,"tame":24763,"##keepers":24764,"1690":24765,"acacia":24766,"reformer":24767,"##osed":24768,"##rys":24769,"shuffling":24770,"##iring":24771,"##shima":24772,"eastbound":24773,"ionic":24774,"rhea":24775,"flees":24776,"littered":24777,"##oum":24778,"rocker":24779,"vomiting":24780,"groaning":24781,"champ":24782,"overwhelmingly":24783,"civilizations":24784,"paces":24785,"sloop":24786,"adoptive":24787,"##tish":24788,"skaters":24789,"##vres":24790,"aiding":2479
1,"mango":24792,"##joy":24793,"nikola":24794,"shriek":24795,"##ignon":24796,"pharmaceuticals":24797,"##mg":24798,"tuna":24799,"calvert":24800,"gustavo":24801,"stocked":24802,"yearbook":24803,"##urai":24804,"##mana":24805,"computed":24806,"subsp":24807,"riff":24808,"hanoi":24809,"kelvin":24810,"hamid":24811,"moors":24812,"pastures":24813,"summons":24814,"jihad":24815,"nectar":24816,"##ctors":24817,"bayou":24818,"untitled":24819,"pleasing":24820,"vastly":24821,"republics":24822,"intellect":24823,"##η":24824,"##ulio":24825,"##tou":24826,"crumbling":24827,"stylistic":24828,"sb":24829,"##ی":24830,"consolation":24831,"frequented":24832,"h₂o":24833,"walden":24834,"widows":24835,"##iens":24836,"404":24837,"##ignment":24838,"chunks":24839,"improves":24840,"288":24841,"grit":24842,"recited":24843,"##dev":24844,"snarl":24845,"sociological":24846,"##arte":24847,"##gul":24848,"inquired":24849,"##held":24850,"bruise":24851,"clube":24852,"consultancy":24853,"homogeneous":24854,"hornets":24855,"multiplication":24856,"pasta":24857,"prick":24858,"savior":24859,"##grin":24860,"##kou":24861,"##phile":24862,"yoon":24863,"##gara":24864,"grimes":24865,"vanishing":24866,"cheering":24867,"reacting":24868,"bn":24869,"distillery":24870,"##quisite":24871,"##vity":24872,"coe":24873,"dockyard":24874,"massif":24875,"##jord":24876,"escorts":24877,"voss":24878,"##valent":24879,"byte":24880,"chopped":24881,"hawke":24882,"illusions":24883,"workings":24884,"floats":24885,"##koto":24886,"##vac":24887,"kv":24888,"annapolis":24889,"madden":24890,"##onus":24891,"alvaro":24892,"noctuidae":24893,"##cum":24894,"##scopic":24895,"avenge":24896,"steamboat":24897,"forte":24898,"illustrates":24899,"erika":24900,"##trip":24901,"570":24902,"dew":24903,"nationalities":24904,"bran":24905,"manifested":24906,"thirsty":24907,"diversified":24908,"muscled":24909,"reborn":24910,"##standing":24911,"arson":24912,"##lessness":24913,"##dran":24914,"##logram":24915,"##boys":24916,"##kushima":24917,"##vious":24918,"willoughby":24919,"##phobia":24920,"286":24921,"alsace":24922,"dashboard":24923,"yuki":24924,"##chai":24925,"granville":24926,"myspace":24927,"publicized":24928,"tricked":24929,"##gang":24930,"adjective":24931,"##ater":24932,"relic":24933,"reorganisation":24934,"enthusiastically":24935,"indications":24936,"saxe":24937,"##lassified":24938,"consolidate":24939,"iec":24940,"padua":24941,"helplessly":24942,"ramps":24943,"renaming":24944,"regulars":24945,"pedestrians":24946,"accents":24947,"convicts":24948,"inaccurate":24949,"lowers":24950,"mana":24951,"##pati":24952,"barrie":24953,"bjp":24954,"outta":24955,"someplace":24956,"berwick":24957,"flanking":24958,"invoked":24959,"marrow":24960,"sparsely":24961,"excerpts":24962,"clothed":24963,"rei":24964,"##ginal":24965,"wept":24966,"##straße":24967,"##vish":24968,"alexa":24969,"excel":24970,"##ptive":24971,"membranes":24972,"aquitaine":24973,"creeks":24974,"cutler":24975,"sheppard":24976,"implementations":24977,"ns":24978,"##dur":24979,"fragrance":24980,"budge":24981,"concordia":24982,"magnesium":24983,"marcelo":24984,"##antes":24985,"gladly":24986,"vibrating":24987,"##rral":24988,"##ggles":24989,"montrose":24990,"##omba":24991,"lew":24992,"seamus":24993,"1630":24994,"cocky":24995,"##ament":24996,"##uen":24997,"bjorn":24998,"##rrick":24999,"fielder":25000,"fluttering":25001,"##lase":25002,"methyl":25003,"kimberley":25004,"mcdowell":25005,"reductions":25006,"barbed":25007,"##jic":25008,"##tonic":25009,"aeronautical":25010,"condensed":25011,"distracting":25012,"##promising":25013,"huffed":25014,"##cala":2
5015,"##sle":25016,"claudius":25017,"invincible":25018,"missy":25019,"pious":25020,"balthazar":25021,"ci":25022,"##lang":25023,"butte":25024,"combo":25025,"orson":25026,"##dication":25027,"myriad":25028,"1707":25029,"silenced":25030,"##fed":25031,"##rh":25032,"coco":25033,"netball":25034,"yourselves":25035,"##oza":25036,"clarify":25037,"heller":25038,"peg":25039,"durban":25040,"etudes":25041,"offender":25042,"roast":25043,"blackmail":25044,"curvature":25045,"##woods":25046,"vile":25047,"309":25048,"illicit":25049,"suriname":25050,"##linson":25051,"overture":25052,"1685":25053,"bubbling":25054,"gymnast":25055,"tucking":25056,"##mming":25057,"##ouin":25058,"maldives":25059,"##bala":25060,"gurney":25061,"##dda":25062,"##eased":25063,"##oides":25064,"backside":25065,"pinto":25066,"jars":25067,"racehorse":25068,"tending":25069,"##rdial":25070,"baronetcy":25071,"wiener":25072,"duly":25073,"##rke":25074,"barbarian":25075,"cupping":25076,"flawed":25077,"##thesis":25078,"bertha":25079,"pleistocene":25080,"puddle":25081,"swearing":25082,"##nob":25083,"##tically":25084,"fleeting":25085,"prostate":25086,"amulet":25087,"educating":25088,"##mined":25089,"##iti":25090,"##tler":25091,"75th":25092,"jens":25093,"respondents":25094,"analytics":25095,"cavaliers":25096,"papacy":25097,"raju":25098,"##iente":25099,"##ulum":25100,"##tip":25101,"funnel":25102,"271":25103,"disneyland":25104,"##lley":25105,"sociologist":25106,"##iam":25107,"2500":25108,"faulkner":25109,"louvre":25110,"menon":25111,"##dson":25112,"276":25113,"##ower":25114,"afterlife":25115,"mannheim":25116,"peptide":25117,"referees":25118,"comedians":25119,"meaningless":25120,"##anger":25121,"##laise":25122,"fabrics":25123,"hurley":25124,"renal":25125,"sleeps":25126,"##bour":25127,"##icle":25128,"breakout":25129,"kristin":25130,"roadside":25131,"animator":25132,"clover":25133,"disdain":25134,"unsafe":25135,"redesign":25136,"##urity":25137,"firth":25138,"barnsley":25139,"portage":25140,"reset":25141,"narrows":25142,"268":25143,"commandos":25144,"expansive":25145,"speechless":25146,"tubular":25147,"##lux":25148,"essendon":25149,"eyelashes":25150,"smashwords":25151,"##yad":25152,"##bang":25153,"##claim":25154,"craved":25155,"sprinted":25156,"chet":25157,"somme":25158,"astor":25159,"wrocław":25160,"orton":25161,"266":25162,"bane":25163,"##erving":25164,"##uing":25165,"mischief":25166,"##amps":25167,"##sund":25168,"scaling":25169,"terre":25170,"##xious":25171,"impairment":25172,"offenses":25173,"undermine":25174,"moi":25175,"soy":25176,"contiguous":25177,"arcadia":25178,"inuit":25179,"seam":25180,"##tops":25181,"macbeth":25182,"rebelled":25183,"##icative":25184,"##iot":25185,"590":25186,"elaborated":25187,"frs":25188,"uniformed":25189,"##dberg":25190,"259":25191,"powerless":25192,"priscilla":25193,"stimulated":25194,"980":25195,"qc":25196,"arboretum":25197,"frustrating":25198,"trieste":25199,"bullock":25200,"##nified":25201,"enriched":25202,"glistening":25203,"intern":25204,"##adia":25205,"locus":25206,"nouvelle":25207,"ollie":25208,"ike":25209,"lash":25210,"starboard":25211,"ee":25212,"tapestry":25213,"headlined":25214,"hove":25215,"rigged":25216,"##vite":25217,"pollock":25218,"##yme":25219,"thrive":25220,"clustered":25221,"cas":25222,"roi":25223,"gleamed":25224,"olympiad":25225,"##lino":25226,"pressured":25227,"regimes":25228,"##hosis":25229,"##lick":25230,"ripley":25231,"##ophone":25232,"kickoff":25233,"gallon":25234,"rockwell":25235,"##arable":25236,"crusader":25237,"glue":25238,"revolutions":25239,"scrambling":25240,"1714":25241,"grover":25242,"##jur
e":25243,"englishman":25244,"aztec":25245,"263":25246,"contemplating":25247,"coven":25248,"ipad":25249,"preach":25250,"triumphant":25251,"tufts":25252,"##esian":25253,"rotational":25254,"##phus":25255,"328":25256,"falkland":25257,"##brates":25258,"strewn":25259,"clarissa":25260,"rejoin":25261,"environmentally":25262,"glint":25263,"banded":25264,"drenched":25265,"moat":25266,"albanians":25267,"johor":25268,"rr":25269,"maestro":25270,"malley":25271,"nouveau":25272,"shaded":25273,"taxonomy":25274,"v6":25275,"adhere":25276,"bunk":25277,"airfields":25278,"##ritan":25279,"1741":25280,"encompass":25281,"remington":25282,"tran":25283,"##erative":25284,"amelie":25285,"mazda":25286,"friar":25287,"morals":25288,"passions":25289,"##zai":25290,"breadth":25291,"vis":25292,"##hae":25293,"argus":25294,"burnham":25295,"caressing":25296,"insider":25297,"rudd":25298,"##imov":25299,"##mini":25300,"##rso":25301,"italianate":25302,"murderous":25303,"textual":25304,"wainwright":25305,"armada":25306,"bam":25307,"weave":25308,"timer":25309,"##taken":25310,"##nh":25311,"fra":25312,"##crest":25313,"ardent":25314,"salazar":25315,"taps":25316,"tunis":25317,"##ntino":25318,"allegro":25319,"gland":25320,"philanthropic":25321,"##chester":25322,"implication":25323,"##optera":25324,"esq":25325,"judas":25326,"noticeably":25327,"wynn":25328,"##dara":25329,"inched":25330,"indexed":25331,"crises":25332,"villiers":25333,"bandit":25334,"royalties":25335,"patterned":25336,"cupboard":25337,"interspersed":25338,"accessory":25339,"isla":25340,"kendrick":25341,"entourage":25342,"stitches":25343,"##esthesia":25344,"headwaters":25345,"##ior":25346,"interlude":25347,"distraught":25348,"draught":25349,"1727":25350,"##basket":25351,"biased":25352,"sy":25353,"transient":25354,"triad":25355,"subgenus":25356,"adapting":25357,"kidd":25358,"shortstop":25359,"##umatic":25360,"dimly":25361,"spiked":25362,"mcleod":25363,"reprint":25364,"nellie":25365,"pretoria":25366,"windmill":25367,"##cek":25368,"singled":25369,"##mps":25370,"273":25371,"reunite":25372,"##orous":25373,"747":25374,"bankers":25375,"outlying":25376,"##omp":25377,"##ports":25378,"##tream":25379,"apologies":25380,"cosmetics":25381,"patsy":25382,"##deh":25383,"##ocks":25384,"##yson":25385,"bender":25386,"nantes":25387,"serene":25388,"##nad":25389,"lucha":25390,"mmm":25391,"323":25392,"##cius":25393,"##gli":25394,"cmll":25395,"coinage":25396,"nestor":25397,"juarez":25398,"##rook":25399,"smeared":25400,"sprayed":25401,"twitching":25402,"sterile":25403,"irina":25404,"embodied":25405,"juveniles":25406,"enveloped":25407,"miscellaneous":25408,"cancers":25409,"dq":25410,"gulped":25411,"luisa":25412,"crested":25413,"swat":25414,"donegal":25415,"ref":25416,"##anov":25417,"##acker":25418,"hearst":25419,"mercantile":25420,"##lika":25421,"doorbell":25422,"ua":25423,"vicki":25424,"##alla":25425,"##som":25426,"bilbao":25427,"psychologists":25428,"stryker":25429,"sw":25430,"horsemen":25431,"turkmenistan":25432,"wits":25433,"##national":25434,"anson":25435,"mathew":25436,"screenings":25437,"##umb":25438,"rihanna":25439,"##agne":25440,"##nessy":25441,"aisles":25442,"##iani":25443,"##osphere":25444,"hines":25445,"kenton":25446,"saskatoon":25447,"tasha":25448,"truncated":25449,"##champ":25450,"##itan":25451,"mildred":25452,"advises":25453,"fredrik":25454,"interpreting":25455,"inhibitors":25456,"##athi":25457,"spectroscopy":25458,"##hab":25459,"##kong":25460,"karim":25461,"panda":25462,"##oia":25463,"##nail":25464,"##vc":25465,"conqueror":25466,"kgb":25467,"leukemia":25468,"##dity":25469,"arrivals":25470
,"cheered":25471,"pisa":25472,"phosphorus":25473,"shielded":25474,"##riated":25475,"mammal":25476,"unitarian":25477,"urgently":25478,"chopin":25479,"sanitary":25480,"##mission":25481,"spicy":25482,"drugged":25483,"hinges":25484,"##tort":25485,"tipping":25486,"trier":25487,"impoverished":25488,"westchester":25489,"##caster":25490,"267":25491,"epoch":25492,"nonstop":25493,"##gman":25494,"##khov":25495,"aromatic":25496,"centrally":25497,"cerro":25498,"##tively":25499,"##vio":25500,"billions":25501,"modulation":25502,"sedimentary":25503,"283":25504,"facilitating":25505,"outrageous":25506,"goldstein":25507,"##eak":25508,"##kt":25509,"ld":25510,"maitland":25511,"penultimate":25512,"pollard":25513,"##dance":25514,"fleets":25515,"spaceship":25516,"vertebrae":25517,"##nig":25518,"alcoholism":25519,"als":25520,"recital":25521,"##bham":25522,"##ference":25523,"##omics":25524,"m2":25525,"##bm":25526,"trois":25527,"##tropical":25528,"##в":25529,"commemorates":25530,"##meric":25531,"marge":25532,"##raction":25533,"1643":25534,"670":25535,"cosmetic":25536,"ravaged":25537,"##ige":25538,"catastrophe":25539,"eng":25540,"##shida":25541,"albrecht":25542,"arterial":25543,"bellamy":25544,"decor":25545,"harmon":25546,"##rde":25547,"bulbs":25548,"synchronized":25549,"vito":25550,"easiest":25551,"shetland":25552,"shielding":25553,"wnba":25554,"##glers":25555,"##ssar":25556,"##riam":25557,"brianna":25558,"cumbria":25559,"##aceous":25560,"##rard":25561,"cores":25562,"thayer":25563,"##nsk":25564,"brood":25565,"hilltop":25566,"luminous":25567,"carts":25568,"keynote":25569,"larkin":25570,"logos":25571,"##cta":25572,"##ا":25573,"##mund":25574,"##quay":25575,"lilith":25576,"tinted":25577,"277":25578,"wrestle":25579,"mobilization":25580,"##uses":25581,"sequential":25582,"siam":25583,"bloomfield":25584,"takahashi":25585,"274":25586,"##ieving":25587,"presenters":25588,"ringo":25589,"blazed":25590,"witty":25591,"##oven":25592,"##ignant":25593,"devastation":25594,"haydn":25595,"harmed":25596,"newt":25597,"therese":25598,"##peed":25599,"gershwin":25600,"molina":25601,"rabbis":25602,"sudanese":25603,"001":25604,"innate":25605,"restarted":25606,"##sack":25607,"##fus":25608,"slices":25609,"wb":25610,"##shah":25611,"enroll":25612,"hypothetical":25613,"hysterical":25614,"1743":25615,"fabio":25616,"indefinite":25617,"warped":25618,"##hg":25619,"exchanging":25620,"525":25621,"unsuitable":25622,"##sboro":25623,"gallo":25624,"1603":25625,"bret":25626,"cobalt":25627,"homemade":25628,"##hunter":25629,"mx":25630,"operatives":25631,"##dhar":25632,"terraces":25633,"durable":25634,"latch":25635,"pens":25636,"whorls":25637,"##ctuated":25638,"##eaux":25639,"billing":25640,"ligament":25641,"succumbed":25642,"##gly":25643,"regulators":25644,"spawn":25645,"##brick":25646,"##stead":25647,"filmfare":25648,"rochelle":25649,"##nzo":25650,"1725":25651,"circumstance":25652,"saber":25653,"supplements":25654,"##nsky":25655,"##tson":25656,"crowe":25657,"wellesley":25658,"carrot":25659,"##9th":25660,"##movable":25661,"primate":25662,"drury":25663,"sincerely":25664,"topical":25665,"##mad":25666,"##rao":25667,"callahan":25668,"kyiv":25669,"smarter":25670,"tits":25671,"undo":25672,"##yeh":25673,"announcements":25674,"anthologies":25675,"barrio":25676,"nebula":25677,"##islaus":25678,"##shaft":25679,"##tyn":25680,"bodyguards":25681,"2021":25682,"assassinate":25683,"barns":25684,"emmett":25685,"scully":25686,"##mah":25687,"##yd":25688,"##eland":25689,"##tino":25690,"##itarian":25691,"demoted":25692,"gorman":25693,"lashed":25694,"prized":25695,"adventist":25696,"w
rit":25697,"##gui":25698,"alla":25699,"invertebrates":25700,"##ausen":25701,"1641":25702,"amman":25703,"1742":25704,"align":25705,"healy":25706,"redistribution":25707,"##gf":25708,"##rize":25709,"insulation":25710,"##drop":25711,"adherents":25712,"hezbollah":25713,"vitro":25714,"ferns":25715,"yanking":25716,"269":25717,"php":25718,"registering":25719,"uppsala":25720,"cheerleading":25721,"confines":25722,"mischievous":25723,"tully":25724,"##ross":25725,"49th":25726,"docked":25727,"roam":25728,"stipulated":25729,"pumpkin":25730,"##bry":25731,"prompt":25732,"##ezer":25733,"blindly":25734,"shuddering":25735,"craftsmen":25736,"frail":25737,"scented":25738,"katharine":25739,"scramble":25740,"shaggy":25741,"sponge":25742,"helix":25743,"zaragoza":25744,"279":25745,"##52":25746,"43rd":25747,"backlash":25748,"fontaine":25749,"seizures":25750,"posse":25751,"cowan":25752,"nonfiction":25753,"telenovela":25754,"wwii":25755,"hammered":25756,"undone":25757,"##gpur":25758,"encircled":25759,"irs":25760,"##ivation":25761,"artefacts":25762,"oneself":25763,"searing":25764,"smallpox":25765,"##belle":25766,"##osaurus":25767,"shandong":25768,"breached":25769,"upland":25770,"blushing":25771,"rankin":25772,"infinitely":25773,"psyche":25774,"tolerated":25775,"docking":25776,"evicted":25777,"##col":25778,"unmarked":25779,"##lving":25780,"gnome":25781,"lettering":25782,"litres":25783,"musique":25784,"##oint":25785,"benevolent":25786,"##jal":25787,"blackened":25788,"##anna":25789,"mccall":25790,"racers":25791,"tingle":25792,"##ocene":25793,"##orestation":25794,"introductions":25795,"radically":25796,"292":25797,"##hiff":25798,"##باد":25799,"1610":25800,"1739":25801,"munchen":25802,"plead":25803,"##nka":25804,"condo":25805,"scissors":25806,"##sight":25807,"##tens":25808,"apprehension":25809,"##cey":25810,"##yin":25811,"hallmark":25812,"watering":25813,"formulas":25814,"sequels":25815,"##llas":25816,"aggravated":25817,"bae":25818,"commencing":25819,"##building":25820,"enfield":25821,"prohibits":25822,"marne":25823,"vedic":25824,"civilized":25825,"euclidean":25826,"jagger":25827,"beforehand":25828,"blasts":25829,"dumont":25830,"##arney":25831,"##nem":25832,"740":25833,"conversions":25834,"hierarchical":25835,"rios":25836,"simulator":25837,"##dya":25838,"##lellan":25839,"hedges":25840,"oleg":25841,"thrusts":25842,"shadowed":25843,"darby":25844,"maximize":25845,"1744":25846,"gregorian":25847,"##nded":25848,"##routed":25849,"sham":25850,"unspecified":25851,"##hog":25852,"emory":25853,"factual":25854,"##smo":25855,"##tp":25856,"fooled":25857,"##rger":25858,"ortega":25859,"wellness":25860,"marlon":25861,"##oton":25862,"##urance":25863,"casket":25864,"keating":25865,"ley":25866,"enclave":25867,"##ayan":25868,"char":25869,"influencing":25870,"jia":25871,"##chenko":25872,"412":25873,"ammonia":25874,"erebidae":25875,"incompatible":25876,"violins":25877,"cornered":25878,"##arat":25879,"grooves":25880,"astronauts":25881,"columbian":25882,"rampant":25883,"fabrication":25884,"kyushu":25885,"mahmud":25886,"vanish":25887,"##dern":25888,"mesopotamia":25889,"##lete":25890,"ict":25891,"##rgen":25892,"caspian":25893,"kenji":25894,"pitted":25895,"##vered":25896,"999":25897,"grimace":25898,"roanoke":25899,"tchaikovsky":25900,"twinned":25901,"##analysis":25902,"##awan":25903,"xinjiang":25904,"arias":25905,"clemson":25906,"kazakh":25907,"sizable":25908,"1662":25909,"##khand":25910,"##vard":25911,"plunge":25912,"tatum":25913,"vittorio":25914,"##nden":25915,"cholera":25916,"##dana":25917,"##oper":25918,"bracing":25919,"indifference":25920,"project
ile":25921,"superliga":25922,"##chee":25923,"realises":25924,"upgrading":25925,"299":25926,"porte":25927,"retribution":25928,"##vies":25929,"nk":25930,"stil":25931,"##resses":25932,"ama":25933,"bureaucracy":25934,"blackberry":25935,"bosch":25936,"testosterone":25937,"collapses":25938,"greer":25939,"##pathic":25940,"ioc":25941,"fifties":25942,"malls":25943,"##erved":25944,"bao":25945,"baskets":25946,"adolescents":25947,"siegfried":25948,"##osity":25949,"##tosis":25950,"mantra":25951,"detecting":25952,"existent":25953,"fledgling":25954,"##cchi":25955,"dissatisfied":25956,"gan":25957,"telecommunication":25958,"mingled":25959,"sobbed":25960,"6000":25961,"controversies":25962,"outdated":25963,"taxis":25964,"##raus":25965,"fright":25966,"slams":25967,"##lham":25968,"##fect":25969,"##tten":25970,"detectors":25971,"fetal":25972,"tanned":25973,"##uw":25974,"fray":25975,"goth":25976,"olympian":25977,"skipping":25978,"mandates":25979,"scratches":25980,"sheng":25981,"unspoken":25982,"hyundai":25983,"tracey":25984,"hotspur":25985,"restrictive":25986,"##buch":25987,"americana":25988,"mundo":25989,"##bari":25990,"burroughs":25991,"diva":25992,"vulcan":25993,"##6th":25994,"distinctions":25995,"thumping":25996,"##ngen":25997,"mikey":25998,"sheds":25999,"fide":26000,"rescues":26001,"springsteen":26002,"vested":26003,"valuation":26004,"##ece":26005,"##ely":26006,"pinnacle":26007,"rake":26008,"sylvie":26009,"##edo":26010,"almond":26011,"quivering":26012,"##irus":26013,"alteration":26014,"faltered":26015,"##wad":26016,"51st":26017,"hydra":26018,"ticked":26019,"##kato":26020,"recommends":26021,"##dicated":26022,"antigua":26023,"arjun":26024,"stagecoach":26025,"wilfred":26026,"trickle":26027,"pronouns":26028,"##pon":26029,"aryan":26030,"nighttime":26031,"##anian":26032,"gall":26033,"pea":26034,"stitch":26035,"##hei":26036,"leung":26037,"milos":26038,"##dini":26039,"eritrea":26040,"nexus":26041,"starved":26042,"snowfall":26043,"kant":26044,"parasitic":26045,"cot":26046,"discus":26047,"hana":26048,"strikers":26049,"appleton":26050,"kitchens":26051,"##erina":26052,"##partisan":26053,"##itha":26054,"##vius":26055,"disclose":26056,"metis":26057,"##channel":26058,"1701":26059,"tesla":26060,"##vera":26061,"fitch":26062,"1735":26063,"blooded":26064,"##tila":26065,"decimal":26066,"##tang":26067,"##bai":26068,"cyclones":26069,"eun":26070,"bottled":26071,"peas":26072,"pensacola":26073,"basha":26074,"bolivian":26075,"crabs":26076,"boil":26077,"lanterns":26078,"partridge":26079,"roofed":26080,"1645":26081,"necks":26082,"##phila":26083,"opined":26084,"patting":26085,"##kla":26086,"##lland":26087,"chuckles":26088,"volta":26089,"whereupon":26090,"##nche":26091,"devout":26092,"euroleague":26093,"suicidal":26094,"##dee":26095,"inherently":26096,"involuntary":26097,"knitting":26098,"nasser":26099,"##hide":26100,"puppets":26101,"colourful":26102,"courageous":26103,"southend":26104,"stills":26105,"miraculous":26106,"hodgson":26107,"richer":26108,"rochdale":26109,"ethernet":26110,"greta":26111,"uniting":26112,"prism":26113,"umm":26114,"##haya":26115,"##itical":26116,"##utation":26117,"deterioration":26118,"pointe":26119,"prowess":26120,"##ropriation":26121,"lids":26122,"scranton":26123,"billings":26124,"subcontinent":26125,"##koff":26126,"##scope":26127,"brute":26128,"kellogg":26129,"psalms":26130,"degraded":26131,"##vez":26132,"stanisław":26133,"##ructured":26134,"ferreira":26135,"pun":26136,"astonishing":26137,"gunnar":26138,"##yat":26139,"arya":26140,"prc":26141,"gottfried":26142,"##tight":26143,"excursion":26144,"##ographer":26145
,"dina":26146,"##quil":26147,"##nare":26148,"huffington":26149,"illustrious":26150,"wilbur":26151,"gundam":26152,"verandah":26153,"##zard":26154,"naacp":26155,"##odle":26156,"constructive":26157,"fjord":26158,"kade":26159,"##naud":26160,"generosity":26161,"thrilling":26162,"baseline":26163,"cayman":26164,"frankish":26165,"plastics":26166,"accommodations":26167,"zoological":26168,"##fting":26169,"cedric":26170,"qb":26171,"motorized":26172,"##dome":26173,"##otted":26174,"squealed":26175,"tackled":26176,"canucks":26177,"budgets":26178,"situ":26179,"asthma":26180,"dail":26181,"gabled":26182,"grasslands":26183,"whimpered":26184,"writhing":26185,"judgments":26186,"##65":26187,"minnie":26188,"pv":26189,"##carbon":26190,"bananas":26191,"grille":26192,"domes":26193,"monique":26194,"odin":26195,"maguire":26196,"markham":26197,"tierney":26198,"##estra":26199,"##chua":26200,"libel":26201,"poke":26202,"speedy":26203,"atrium":26204,"laval":26205,"notwithstanding":26206,"##edly":26207,"fai":26208,"kala":26209,"##sur":26210,"robb":26211,"##sma":26212,"listings":26213,"luz":26214,"supplementary":26215,"tianjin":26216,"##acing":26217,"enzo":26218,"jd":26219,"ric":26220,"scanner":26221,"croats":26222,"transcribed":26223,"##49":26224,"arden":26225,"cv":26226,"##hair":26227,"##raphy":26228,"##lver":26229,"##uy":26230,"357":26231,"seventies":26232,"staggering":26233,"alam":26234,"horticultural":26235,"hs":26236,"regression":26237,"timbers":26238,"blasting":26239,"##ounded":26240,"montagu":26241,"manipulating":26242,"##cit":26243,"catalytic":26244,"1550":26245,"troopers":26246,"##meo":26247,"condemnation":26248,"fitzpatrick":26249,"##oire":26250,"##roved":26251,"inexperienced":26252,"1670":26253,"castes":26254,"##lative":26255,"outing":26256,"314":26257,"dubois":26258,"flicking":26259,"quarrel":26260,"ste":26261,"learners":26262,"1625":26263,"iq":26264,"whistled":26265,"##class":26266,"282":26267,"classify":26268,"tariffs":26269,"temperament":26270,"355":26271,"folly":26272,"liszt":26273,"##yles":26274,"immersed":26275,"jordanian":26276,"ceasefire":26277,"apparel":26278,"extras":26279,"maru":26280,"fished":26281,"##bio":26282,"harta":26283,"stockport":26284,"assortment":26285,"craftsman":26286,"paralysis":26287,"transmitters":26288,"##cola":26289,"blindness":26290,"##wk":26291,"fatally":26292,"proficiency":26293,"solemnly":26294,"##orno":26295,"repairing":26296,"amore":26297,"groceries":26298,"ultraviolet":26299,"##chase":26300,"schoolhouse":26301,"##tua":26302,"resurgence":26303,"nailed":26304,"##otype":26305,"##×":26306,"ruse":26307,"saliva":26308,"diagrams":26309,"##tructing":26310,"albans":26311,"rann":26312,"thirties":26313,"1b":26314,"antennas":26315,"hilarious":26316,"cougars":26317,"paddington":26318,"stats":26319,"##eger":26320,"breakaway":26321,"ipod":26322,"reza":26323,"authorship":26324,"prohibiting":26325,"scoffed":26326,"##etz":26327,"##ttle":26328,"conscription":26329,"defected":26330,"trondheim":26331,"##fires":26332,"ivanov":26333,"keenan":26334,"##adan":26335,"##ciful":26336,"##fb":26337,"##slow":26338,"locating":26339,"##ials":26340,"##tford":26341,"cadiz":26342,"basalt":26343,"blankly":26344,"interned":26345,"rags":26346,"rattling":26347,"##tick":26348,"carpathian":26349,"reassured":26350,"sync":26351,"bum":26352,"guildford":26353,"iss":26354,"staunch":26355,"##onga":26356,"astronomers":26357,"sera":26358,"sofie":26359,"emergencies":26360,"susquehanna":26361,"##heard":26362,"duc":26363,"mastery":26364,"vh1":26365,"williamsburg":26366,"bayer":26367,"buckled":26368,"craving":26369,"##khan":26370
,"##rdes":26371,"bloomington":26372,"##write":26373,"alton":26374,"barbecue":26375,"##bians":26376,"justine":26377,"##hri":26378,"##ndt":26379,"delightful":26380,"smartphone":26381,"newtown":26382,"photon":26383,"retrieval":26384,"peugeot":26385,"hissing":26386,"##monium":26387,"##orough":26388,"flavors":26389,"lighted":26390,"relaunched":26391,"tainted":26392,"##games":26393,"##lysis":26394,"anarchy":26395,"microscopic":26396,"hopping":26397,"adept":26398,"evade":26399,"evie":26400,"##beau":26401,"inhibit":26402,"sinn":26403,"adjustable":26404,"hurst":26405,"intuition":26406,"wilton":26407,"cisco":26408,"44th":26409,"lawful":26410,"lowlands":26411,"stockings":26412,"thierry":26413,"##dalen":26414,"##hila":26415,"##nai":26416,"fates":26417,"prank":26418,"tb":26419,"maison":26420,"lobbied":26421,"provocative":26422,"1724":26423,"4a":26424,"utopia":26425,"##qual":26426,"carbonate":26427,"gujarati":26428,"purcell":26429,"##rford":26430,"curtiss":26431,"##mei":26432,"overgrown":26433,"arenas":26434,"mediation":26435,"swallows":26436,"##rnik":26437,"respectful":26438,"turnbull":26439,"##hedron":26440,"##hope":26441,"alyssa":26442,"ozone":26443,"##ʻi":26444,"ami":26445,"gestapo":26446,"johansson":26447,"snooker":26448,"canteen":26449,"cuff":26450,"declines":26451,"empathy":26452,"stigma":26453,"##ags":26454,"##iner":26455,"##raine":26456,"taxpayers":26457,"gui":26458,"volga":26459,"##wright":26460,"##copic":26461,"lifespan":26462,"overcame":26463,"tattooed":26464,"enactment":26465,"giggles":26466,"##ador":26467,"##camp":26468,"barrington":26469,"bribe":26470,"obligatory":26471,"orbiting":26472,"peng":26473,"##enas":26474,"elusive":26475,"sucker":26476,"##vating":26477,"cong":26478,"hardship":26479,"empowered":26480,"anticipating":26481,"estrada":26482,"cryptic":26483,"greasy":26484,"detainees":26485,"planck":26486,"sudbury":26487,"plaid":26488,"dod":26489,"marriott":26490,"kayla":26491,"##ears":26492,"##vb":26493,"##zd":26494,"mortally":26495,"##hein":26496,"cognition":26497,"radha":26498,"319":26499,"liechtenstein":26500,"meade":26501,"richly":26502,"argyle":26503,"harpsichord":26504,"liberalism":26505,"trumpets":26506,"lauded":26507,"tyrant":26508,"salsa":26509,"tiled":26510,"lear":26511,"promoters":26512,"reused":26513,"slicing":26514,"trident":26515,"##chuk":26516,"##gami":26517,"##lka":26518,"cantor":26519,"checkpoint":26520,"##points":26521,"gaul":26522,"leger":26523,"mammalian":26524,"##tov":26525,"##aar":26526,"##schaft":26527,"doha":26528,"frenchman":26529,"nirvana":26530,"##vino":26531,"delgado":26532,"headlining":26533,"##eron":26534,"##iography":26535,"jug":26536,"tko":26537,"1649":26538,"naga":26539,"intersections":26540,"##jia":26541,"benfica":26542,"nawab":26543,"##suka":26544,"ashford":26545,"gulp":26546,"##deck":26547,"##vill":26548,"##rug":26549,"brentford":26550,"frazier":26551,"pleasures":26552,"dunne":26553,"potsdam":26554,"shenzhen":26555,"dentistry":26556,"##tec":26557,"flanagan":26558,"##dorff":26559,"##hear":26560,"chorale":26561,"dinah":26562,"prem":26563,"quezon":26564,"##rogated":26565,"relinquished":26566,"sutra":26567,"terri":26568,"##pani":26569,"flaps":26570,"##rissa":26571,"poly":26572,"##rnet":26573,"homme":26574,"aback":26575,"##eki":26576,"linger":26577,"womb":26578,"##kson":26579,"##lewood":26580,"doorstep":26581,"orthodoxy":26582,"threaded":26583,"westfield":26584,"##rval":26585,"dioceses":26586,"fridays":26587,"subsided":26588,"##gata":26589,"loyalists":26590,"##biotic":26591,"##ettes":26592,"letterman":26593,"lunatic":26594,"prelate":26595,"tenderly":26596,
"invariably":26597,"souza":26598,"thug":26599,"winslow":26600,"##otide":26601,"furlongs":26602,"gogh":26603,"jeopardy":26604,"##runa":26605,"pegasus":26606,"##umble":26607,"humiliated":26608,"standalone":26609,"tagged":26610,"##roller":26611,"freshmen":26612,"klan":26613,"##bright":26614,"attaining":26615,"initiating":26616,"transatlantic":26617,"logged":26618,"viz":26619,"##uance":26620,"1723":26621,"combatants":26622,"intervening":26623,"stephane":26624,"chieftain":26625,"despised":26626,"grazed":26627,"317":26628,"cdc":26629,"galveston":26630,"godzilla":26631,"macro":26632,"simulate":26633,"##planes":26634,"parades":26635,"##esses":26636,"960":26637,"##ductive":26638,"##unes":26639,"equator":26640,"overdose":26641,"##cans":26642,"##hosh":26643,"##lifting":26644,"joshi":26645,"epstein":26646,"sonora":26647,"treacherous":26648,"aquatics":26649,"manchu":26650,"responsive":26651,"##sation":26652,"supervisory":26653,"##christ":26654,"##llins":26655,"##ibar":26656,"##balance":26657,"##uso":26658,"kimball":26659,"karlsruhe":26660,"mab":26661,"##emy":26662,"ignores":26663,"phonetic":26664,"reuters":26665,"spaghetti":26666,"820":26667,"almighty":26668,"danzig":26669,"rumbling":26670,"tombstone":26671,"designations":26672,"lured":26673,"outset":26674,"##felt":26675,"supermarkets":26676,"##wt":26677,"grupo":26678,"kei":26679,"kraft":26680,"susanna":26681,"##blood":26682,"comprehension":26683,"genealogy":26684,"##aghan":26685,"##verted":26686,"redding":26687,"##ythe":26688,"1722":26689,"bowing":26690,"##pore":26691,"##roi":26692,"lest":26693,"sharpened":26694,"fulbright":26695,"valkyrie":26696,"sikhs":26697,"##unds":26698,"swans":26699,"bouquet":26700,"merritt":26701,"##tage":26702,"##venting":26703,"commuted":26704,"redhead":26705,"clerks":26706,"leasing":26707,"cesare":26708,"dea":26709,"hazy":26710,"##vances":26711,"fledged":26712,"greenfield":26713,"servicemen":26714,"##gical":26715,"armando":26716,"blackout":26717,"dt":26718,"sagged":26719,"downloadable":26720,"intra":26721,"potion":26722,"pods":26723,"##4th":26724,"##mism":26725,"xp":26726,"attendants":26727,"gambia":26728,"stale":26729,"##ntine":26730,"plump":26731,"asteroids":26732,"rediscovered":26733,"buds":26734,"flea":26735,"hive":26736,"##neas":26737,"1737":26738,"classifications":26739,"debuts":26740,"##eles":26741,"olympus":26742,"scala":26743,"##eurs":26744,"##gno":26745,"##mute":26746,"hummed":26747,"sigismund":26748,"visuals":26749,"wiggled":26750,"await":26751,"pilasters":26752,"clench":26753,"sulfate":26754,"##ances":26755,"bellevue":26756,"enigma":26757,"trainee":26758,"snort":26759,"##sw":26760,"clouded":26761,"denim":26762,"##rank":26763,"##rder":26764,"churning":26765,"hartman":26766,"lodges":26767,"riches":26768,"sima":26769,"##missible":26770,"accountable":26771,"socrates":26772,"regulates":26773,"mueller":26774,"##cr":26775,"1702":26776,"avoids":26777,"solids":26778,"himalayas":26779,"nutrient":26780,"pup":26781,"##jevic":26782,"squat":26783,"fades":26784,"nec":26785,"##lates":26786,"##pina":26787,"##rona":26788,"##ου":26789,"privateer":26790,"tequila":26791,"##gative":26792,"##mpton":26793,"apt":26794,"hornet":26795,"immortals":26796,"##dou":26797,"asturias":26798,"cleansing":26799,"dario":26800,"##rries":26801,"##anta":26802,"etymology":26803,"servicing":26804,"zhejiang":26805,"##venor":26806,"##nx":26807,"horned":26808,"erasmus":26809,"rayon":26810,"relocating":26811,"£10":26812,"##bags":26813,"escalated":26814,"promenade":26815,"stubble":26816,"2010s":26817,"artisans":26818,"axial":26819,"liquids":26820,"mora":26821,"s
ho":26822,"yoo":26823,"##tsky":26824,"bundles":26825,"oldies":26826,"##nally":26827,"notification":26828,"bastion":26829,"##ths":26830,"sparkle":26831,"##lved":26832,"1728":26833,"leash":26834,"pathogen":26835,"highs":26836,"##hmi":26837,"immature":26838,"880":26839,"gonzaga":26840,"ignatius":26841,"mansions":26842,"monterrey":26843,"sweets":26844,"bryson":26845,"##loe":26846,"polled":26847,"regatta":26848,"brightest":26849,"pei":26850,"rosy":26851,"squid":26852,"hatfield":26853,"payroll":26854,"addict":26855,"meath":26856,"cornerback":26857,"heaviest":26858,"lodging":26859,"##mage":26860,"capcom":26861,"rippled":26862,"##sily":26863,"barnet":26864,"mayhem":26865,"ymca":26866,"snuggled":26867,"rousseau":26868,"##cute":26869,"blanchard":26870,"284":26871,"fragmented":26872,"leighton":26873,"chromosomes":26874,"risking":26875,"##md":26876,"##strel":26877,"##utter":26878,"corinne":26879,"coyotes":26880,"cynical":26881,"hiroshi":26882,"yeomanry":26883,"##ractive":26884,"ebook":26885,"grading":26886,"mandela":26887,"plume":26888,"agustin":26889,"magdalene":26890,"##rkin":26891,"bea":26892,"femme":26893,"trafford":26894,"##coll":26895,"##lun":26896,"##tance":26897,"52nd":26898,"fourier":26899,"upton":26900,"##mental":26901,"camilla":26902,"gust":26903,"iihf":26904,"islamabad":26905,"longevity":26906,"##kala":26907,"feldman":26908,"netting":26909,"##rization":26910,"endeavour":26911,"foraging":26912,"mfa":26913,"orr":26914,"##open":26915,"greyish":26916,"contradiction":26917,"graz":26918,"##ruff":26919,"handicapped":26920,"marlene":26921,"tweed":26922,"oaxaca":26923,"spp":26924,"campos":26925,"miocene":26926,"pri":26927,"configured":26928,"cooks":26929,"pluto":26930,"cozy":26931,"pornographic":26932,"##entes":26933,"70th":26934,"fairness":26935,"glided":26936,"jonny":26937,"lynne":26938,"rounding":26939,"sired":26940,"##emon":26941,"##nist":26942,"remade":26943,"uncover":26944,"##mack":26945,"complied":26946,"lei":26947,"newsweek":26948,"##jured":26949,"##parts":26950,"##enting":26951,"##pg":26952,"293":26953,"finer":26954,"guerrillas":26955,"athenian":26956,"deng":26957,"disused":26958,"stepmother":26959,"accuse":26960,"gingerly":26961,"seduction":26962,"521":26963,"confronting":26964,"##walker":26965,"##going":26966,"gora":26967,"nostalgia":26968,"sabres":26969,"virginity":26970,"wrenched":26971,"##minated":26972,"syndication":26973,"wielding":26974,"eyre":26975,"##56":26976,"##gnon":26977,"##igny":26978,"behaved":26979,"taxpayer":26980,"sweeps":26981,"##growth":26982,"childless":26983,"gallant":26984,"##ywood":26985,"amplified":26986,"geraldine":26987,"scrape":26988,"##ffi":26989,"babylonian":26990,"fresco":26991,"##rdan":26992,"##kney":26993,"##position":26994,"1718":26995,"restricting":26996,"tack":26997,"fukuoka":26998,"osborn":26999,"selector":27000,"partnering":27001,"##dlow":27002,"318":27003,"gnu":27004,"kia":27005,"tak":27006,"whitley":27007,"gables":27008,"##54":27009,"##mania":27010,"mri":27011,"softness":27012,"immersion":27013,"##bots":27014,"##evsky":27015,"1713":27016,"chilling":27017,"insignificant":27018,"pcs":27019,"##uis":27020,"elites":27021,"lina":27022,"purported":27023,"supplemental":27024,"teaming":27025,"##americana":27026,"##dding":27027,"##inton":27028,"proficient":27029,"rouen":27030,"##nage":27031,"##rret":27032,"niccolo":27033,"selects":27034,"##bread":27035,"fluffy":27036,"1621":27037,"gruff":27038,"knotted":27039,"mukherjee":27040,"polgara":27041,"thrash":27042,"nicholls":27043,"secluded":27044,"smoothing":27045,"thru":27046,"corsica":27047,"loaf":27048,"whitaker"
:27049,"inquiries":27050,"##rrier":27051,"##kam":27052,"indochina":27053,"289":27054,"marlins":27055,"myles":27056,"peking":27057,"##tea":27058,"extracts":27059,"pastry":27060,"superhuman":27061,"connacht":27062,"vogel":27063,"##ditional":27064,"##het":27065,"##udged":27066,"##lash":27067,"gloss":27068,"quarries":27069,"refit":27070,"teaser":27071,"##alic":27072,"##gaon":27073,"20s":27074,"materialized":27075,"sling":27076,"camped":27077,"pickering":27078,"tung":27079,"tracker":27080,"pursuant":27081,"##cide":27082,"cranes":27083,"soc":27084,"##cini":27085,"##typical":27086,"##viere":27087,"anhalt":27088,"overboard":27089,"workout":27090,"chores":27091,"fares":27092,"orphaned":27093,"stains":27094,"##logie":27095,"fenton":27096,"surpassing":27097,"joyah":27098,"triggers":27099,"##itte":27100,"grandmaster":27101,"##lass":27102,"##lists":27103,"clapping":27104,"fraudulent":27105,"ledger":27106,"nagasaki":27107,"##cor":27108,"##nosis":27109,"##tsa":27110,"eucalyptus":27111,"tun":27112,"##icio":27113,"##rney":27114,"##tara":27115,"dax":27116,"heroism":27117,"ina":27118,"wrexham":27119,"onboard":27120,"unsigned":27121,"##dates":27122,"moshe":27123,"galley":27124,"winnie":27125,"droplets":27126,"exiles":27127,"praises":27128,"watered":27129,"noodles":27130,"##aia":27131,"fein":27132,"adi":27133,"leland":27134,"multicultural":27135,"stink":27136,"bingo":27137,"comets":27138,"erskine":27139,"modernized":27140,"canned":27141,"constraint":27142,"domestically":27143,"chemotherapy":27144,"featherweight":27145,"stifled":27146,"##mum":27147,"darkly":27148,"irresistible":27149,"refreshing":27150,"hasty":27151,"isolate":27152,"##oys":27153,"kitchener":27154,"planners":27155,"##wehr":27156,"cages":27157,"yarn":27158,"implant":27159,"toulon":27160,"elects":27161,"childbirth":27162,"yue":27163,"##lind":27164,"##lone":27165,"cn":27166,"rightful":27167,"sportsman":27168,"junctions":27169,"remodeled":27170,"specifies":27171,"##rgh":27172,"291":27173,"##oons":27174,"complimented":27175,"##urgent":27176,"lister":27177,"ot":27178,"##logic":27179,"bequeathed":27180,"cheekbones":27181,"fontana":27182,"gabby":27183,"##dial":27184,"amadeus":27185,"corrugated":27186,"maverick":27187,"resented":27188,"triangles":27189,"##hered":27190,"##usly":27191,"nazareth":27192,"tyrol":27193,"1675":27194,"assent":27195,"poorer":27196,"sectional":27197,"aegean":27198,"##cous":27199,"296":27200,"nylon":27201,"ghanaian":27202,"##egorical":27203,"##weig":27204,"cushions":27205,"forbid":27206,"fusiliers":27207,"obstruction":27208,"somerville":27209,"##scia":27210,"dime":27211,"earrings":27212,"elliptical":27213,"leyte":27214,"oder":27215,"polymers":27216,"timmy":27217,"atm":27218,"midtown":27219,"piloted":27220,"settles":27221,"continual":27222,"externally":27223,"mayfield":27224,"##uh":27225,"enrichment":27226,"henson":27227,"keane":27228,"persians":27229,"1733":27230,"benji":27231,"braden":27232,"pep":27233,"324":27234,"##efe":27235,"contenders":27236,"pepsi":27237,"valet":27238,"##isches":27239,"298":27240,"##asse":27241,"##earing":27242,"goofy":27243,"stroll":27244,"##amen":27245,"authoritarian":27246,"occurrences":27247,"adversary":27248,"ahmedabad":27249,"tangent":27250,"toppled":27251,"dorchester":27252,"1672":27253,"modernism":27254,"marxism":27255,"islamist":27256,"charlemagne":27257,"exponential":27258,"racks":27259,"unicode":27260,"brunette":27261,"mbc":27262,"pic":27263,"skirmish":27264,"##bund":27265,"##lad":27266,"##powered":27267,"##yst":27268,"hoisted":27269,"messina":27270,"shatter":27271,"##ctum":27272,"jedi":27273,"vant
age":27274,"##music":27275,"##neil":27276,"clemens":27277,"mahmoud":27278,"corrupted":27279,"authentication":27280,"lowry":27281,"nils":27282,"##washed":27283,"omnibus":27284,"wounding":27285,"jillian":27286,"##itors":27287,"##opped":27288,"serialized":27289,"narcotics":27290,"handheld":27291,"##arm":27292,"##plicity":27293,"intersecting":27294,"stimulating":27295,"##onis":27296,"crate":27297,"fellowships":27298,"hemingway":27299,"casinos":27300,"climatic":27301,"fordham":27302,"copeland":27303,"drip":27304,"beatty":27305,"leaflets":27306,"robber":27307,"brothel":27308,"madeira":27309,"##hedral":27310,"sphinx":27311,"ultrasound":27312,"##vana":27313,"valor":27314,"forbade":27315,"leonid":27316,"villas":27317,"##aldo":27318,"duane":27319,"marquez":27320,"##cytes":27321,"disadvantaged":27322,"forearms":27323,"kawasaki":27324,"reacts":27325,"consular":27326,"lax":27327,"uncles":27328,"uphold":27329,"##hopper":27330,"concepcion":27331,"dorsey":27332,"lass":27333,"##izan":27334,"arching":27335,"passageway":27336,"1708":27337,"researches":27338,"tia":27339,"internationals":27340,"##graphs":27341,"##opers":27342,"distinguishes":27343,"javanese":27344,"divert":27345,"##uven":27346,"plotted":27347,"##listic":27348,"##rwin":27349,"##erik":27350,"##tify":27351,"affirmative":27352,"signifies":27353,"validation":27354,"##bson":27355,"kari":27356,"felicity":27357,"georgina":27358,"zulu":27359,"##eros":27360,"##rained":27361,"##rath":27362,"overcoming":27363,"##dot":27364,"argyll":27365,"##rbin":27366,"1734":27367,"chiba":27368,"ratification":27369,"windy":27370,"earls":27371,"parapet":27372,"##marks":27373,"hunan":27374,"pristine":27375,"astrid":27376,"punta":27377,"##gart":27378,"brodie":27379,"##kota":27380,"##oder":27381,"malaga":27382,"minerva":27383,"rouse":27384,"##phonic":27385,"bellowed":27386,"pagoda":27387,"portals":27388,"reclamation":27389,"##gur":27390,"##odies":27391,"##⁄₄":27392,"parentheses":27393,"quoting":27394,"allergic":27395,"palette":27396,"showcases":27397,"benefactor":27398,"heartland":27399,"nonlinear":27400,"##tness":27401,"bladed":27402,"cheerfully":27403,"scans":27404,"##ety":27405,"##hone":27406,"1666":27407,"girlfriends":27408,"pedersen":27409,"hiram":27410,"sous":27411,"##liche":27412,"##nator":27413,"1683":27414,"##nery":27415,"##orio":27416,"##umen":27417,"bobo":27418,"primaries":27419,"smiley":27420,"##cb":27421,"unearthed":27422,"uniformly":27423,"fis":27424,"metadata":27425,"1635":27426,"ind":27427,"##oted":27428,"recoil":27429,"##titles":27430,"##tura":27431,"##ια":27432,"406":27433,"hilbert":27434,"jamestown":27435,"mcmillan":27436,"tulane":27437,"seychelles":27438,"##frid":27439,"antics":27440,"coli":27441,"fated":27442,"stucco":27443,"##grants":27444,"1654":27445,"bulky":27446,"accolades":27447,"arrays":27448,"caledonian":27449,"carnage":27450,"optimism":27451,"puebla":27452,"##tative":27453,"##cave":27454,"enforcing":27455,"rotherham":27456,"seo":27457,"dunlop":27458,"aeronautics":27459,"chimed":27460,"incline":27461,"zoning":27462,"archduke":27463,"hellenistic":27464,"##oses":27465,"##sions":27466,"candi":27467,"thong":27468,"##ople":27469,"magnate":27470,"rustic":27471,"##rsk":27472,"projective":27473,"slant":27474,"##offs":27475,"danes":27476,"hollis":27477,"vocalists":27478,"##ammed":27479,"congenital":27480,"contend":27481,"gesellschaft":27482,"##ocating":27483,"##pressive":27484,"douglass":27485,"quieter":27486,"##cm":27487,"##kshi":27488,"howled":27489,"salim":27490,"spontaneously":27491,"townsville":27492,"buena":27493,"southport":27494,"##bold":27495,"kat
o":27496,"1638":27497,"faerie":27498,"stiffly":27499,"##vus":27500,"##rled":27501,"297":27502,"flawless":27503,"realising":27504,"taboo":27505,"##7th":27506,"bytes":27507,"straightening":27508,"356":27509,"jena":27510,"##hid":27511,"##rmin":27512,"cartwright":27513,"berber":27514,"bertram":27515,"soloists":27516,"411":27517,"noses":27518,"417":27519,"coping":27520,"fission":27521,"hardin":27522,"inca":27523,"##cen":27524,"1717":27525,"mobilized":27526,"vhf":27527,"##raf":27528,"biscuits":27529,"curate":27530,"##85":27531,"##anial":27532,"331":27533,"gaunt":27534,"neighbourhoods":27535,"1540":27536,"##abas":27537,"blanca":27538,"bypassed":27539,"sockets":27540,"behold":27541,"coincidentally":27542,"##bane":27543,"nara":27544,"shave":27545,"splinter":27546,"terrific":27547,"##arion":27548,"##erian":27549,"commonplace":27550,"juris":27551,"redwood":27552,"waistband":27553,"boxed":27554,"caitlin":27555,"fingerprints":27556,"jennie":27557,"naturalized":27558,"##ired":27559,"balfour":27560,"craters":27561,"jody":27562,"bungalow":27563,"hugely":27564,"quilt":27565,"glitter":27566,"pigeons":27567,"undertaker":27568,"bulging":27569,"constrained":27570,"goo":27571,"##sil":27572,"##akh":27573,"assimilation":27574,"reworked":27575,"##person":27576,"persuasion":27577,"##pants":27578,"felicia":27579,"##cliff":27580,"##ulent":27581,"1732":27582,"explodes":27583,"##dun":27584,"##inium":27585,"##zic":27586,"lyman":27587,"vulture":27588,"hog":27589,"overlook":27590,"begs":27591,"northwards":27592,"ow":27593,"spoil":27594,"##urer":27595,"fatima":27596,"favorably":27597,"accumulate":27598,"sargent":27599,"sorority":27600,"corresponded":27601,"dispersal":27602,"kochi":27603,"toned":27604,"##imi":27605,"##lita":27606,"internacional":27607,"newfound":27608,"##agger":27609,"##lynn":27610,"##rigue":27611,"booths":27612,"peanuts":27613,"##eborg":27614,"medicare":27615,"muriel":27616,"nur":27617,"##uram":27618,"crates":27619,"millennia":27620,"pajamas":27621,"worsened":27622,"##breakers":27623,"jimi":27624,"vanuatu":27625,"yawned":27626,"##udeau":27627,"carousel":27628,"##hony":27629,"hurdle":27630,"##ccus":27631,"##mounted":27632,"##pod":27633,"rv":27634,"##eche":27635,"airship":27636,"ambiguity":27637,"compulsion":27638,"recapture":27639,"##claiming":27640,"arthritis":27641,"##osomal":27642,"1667":27643,"asserting":27644,"ngc":27645,"sniffing":27646,"dade":27647,"discontent":27648,"glendale":27649,"ported":27650,"##amina":27651,"defamation":27652,"rammed":27653,"##scent":27654,"fling":27655,"livingstone":27656,"##fleet":27657,"875":27658,"##ppy":27659,"apocalyptic":27660,"comrade":27661,"lcd":27662,"##lowe":27663,"cessna":27664,"eine":27665,"persecuted":27666,"subsistence":27667,"demi":27668,"hoop":27669,"reliefs":27670,"710":27671,"coptic":27672,"progressing":27673,"stemmed":27674,"perpetrators":27675,"1665":27676,"priestess":27677,"##nio":27678,"dobson":27679,"ebony":27680,"rooster":27681,"itf":27682,"tortricidae":27683,"##bbon":27684,"##jian":27685,"cleanup":27686,"##jean":27687,"##øy":27688,"1721":27689,"eighties":27690,"taxonomic":27691,"holiness":27692,"##hearted":27693,"##spar":27694,"antilles":27695,"showcasing":27696,"stabilized":27697,"##nb":27698,"gia":27699,"mascara":27700,"michelangelo":27701,"dawned":27702,"##uria":27703,"##vinsky":27704,"extinguished":27705,"fitz":27706,"grotesque":27707,"£100":27708,"##fera":27709,"##loid":27710,"##mous":27711,"barges":27712,"neue":27713,"throbbed":27714,"cipher":27715,"johnnie":27716,"##a1":27717,"##mpt":27718,"outburst":27719,"##swick":27720,"spearheaded":27721,"a
dministrations":27722,"c1":27723,"heartbreak":27724,"pixels":27725,"pleasantly":27726,"##enay":27727,"lombardy":27728,"plush":27729,"##nsed":27730,"bobbie":27731,"##hly":27732,"reapers":27733,"tremor":27734,"xiang":27735,"minogue":27736,"substantive":27737,"hitch":27738,"barak":27739,"##wyl":27740,"kwan":27741,"##encia":27742,"910":27743,"obscene":27744,"elegance":27745,"indus":27746,"surfer":27747,"bribery":27748,"conserve":27749,"##hyllum":27750,"##masters":27751,"horatio":27752,"##fat":27753,"apes":27754,"rebound":27755,"psychotic":27756,"##pour":27757,"iteration":27758,"##mium":27759,"##vani":27760,"botanic":27761,"horribly":27762,"antiques":27763,"dispose":27764,"paxton":27765,"##hli":27766,"##wg":27767,"timeless":27768,"1704":27769,"disregard":27770,"engraver":27771,"hounds":27772,"##bau":27773,"##version":27774,"looted":27775,"uno":27776,"facilitates":27777,"groans":27778,"masjid":27779,"rutland":27780,"antibody":27781,"disqualification":27782,"decatur":27783,"footballers":27784,"quake":27785,"slacks":27786,"48th":27787,"rein":27788,"scribe":27789,"stabilize":27790,"commits":27791,"exemplary":27792,"tho":27793,"##hort":27794,"##chison":27795,"pantry":27796,"traversed":27797,"##hiti":27798,"disrepair":27799,"identifiable":27800,"vibrated":27801,"baccalaureate":27802,"##nnis":27803,"csa":27804,"interviewing":27805,"##iensis":27806,"##raße":27807,"greaves":27808,"wealthiest":27809,"343":27810,"classed":27811,"jogged":27812,"£5":27813,"##58":27814,"##atal":27815,"illuminating":27816,"knicks":27817,"respecting":27818,"##uno":27819,"scrubbed":27820,"##iji":27821,"##dles":27822,"kruger":27823,"moods":27824,"growls":27825,"raider":27826,"silvia":27827,"chefs":27828,"kam":27829,"vr":27830,"cree":27831,"percival":27832,"##terol":27833,"gunter":27834,"counterattack":27835,"defiant":27836,"henan":27837,"ze":27838,"##rasia":27839,"##riety":27840,"equivalence":27841,"submissions":27842,"##fra":27843,"##thor":27844,"bautista":27845,"mechanically":27846,"##heater":27847,"cornice":27848,"herbal":27849,"templar":27850,"##mering":27851,"outputs":27852,"ruining":27853,"ligand":27854,"renumbered":27855,"extravagant":27856,"mika":27857,"blockbuster":27858,"eta":27859,"insurrection":27860,"##ilia":27861,"darkening":27862,"ferocious":27863,"pianos":27864,"strife":27865,"kinship":27866,"##aer":27867,"melee":27868,"##anor":27869,"##iste":27870,"##may":27871,"##oue":27872,"decidedly":27873,"weep":27874,"##jad":27875,"##missive":27876,"##ppel":27877,"354":27878,"puget":27879,"unease":27880,"##gnant":27881,"1629":27882,"hammering":27883,"kassel":27884,"ob":27885,"wessex":27886,"##lga":27887,"bromwich":27888,"egan":27889,"paranoia":27890,"utilization":27891,"##atable":27892,"##idad":27893,"contradictory":27894,"provoke":27895,"##ols":27896,"##ouring":27897,"##tangled":27898,"knesset":27899,"##very":27900,"##lette":27901,"plumbing":27902,"##sden":27903,"##¹":27904,"greensboro":27905,"occult":27906,"sniff":27907,"338":27908,"zev":27909,"beaming":27910,"gamer":27911,"haggard":27912,"mahal":27913,"##olt":27914,"##pins":27915,"mendes":27916,"utmost":27917,"briefing":27918,"gunnery":27919,"##gut":27920,"##pher":27921,"##zh":27922,"##rok":27923,"1679":27924,"khalifa":27925,"sonya":27926,"##boot":27927,"principals":27928,"urbana":27929,"wiring":27930,"##liffe":27931,"##minating":27932,"##rrado":27933,"dahl":27934,"nyu":27935,"skepticism":27936,"np":27937,"townspeople":27938,"ithaca":27939,"lobster":27940,"somethin":27941,"##fur":27942,"##arina":27943,"##−1":27944,"freighter":27945,"zimmerman":27946,"biceps":27947,"contra
ctual":27948,"##herton":27949,"amend":27950,"hurrying":27951,"subconscious":27952,"##anal":27953,"336":27954,"meng":27955,"clermont":27956,"spawning":27957,"##eia":27958,"##lub":27959,"dignitaries":27960,"impetus":27961,"snacks":27962,"spotting":27963,"twigs":27964,"##bilis":27965,"##cz":27966,"##ouk":27967,"libertadores":27968,"nic":27969,"skylar":27970,"##aina":27971,"##firm":27972,"gustave":27973,"asean":27974,"##anum":27975,"dieter":27976,"legislatures":27977,"flirt":27978,"bromley":27979,"trolls":27980,"umar":27981,"##bbies":27982,"##tyle":27983,"blah":27984,"parc":27985,"bridgeport":27986,"crank":27987,"negligence":27988,"##nction":27989,"46th":27990,"constantin":27991,"molded":27992,"bandages":27993,"seriousness":27994,"00pm":27995,"siegel":27996,"carpets":27997,"compartments":27998,"upbeat":27999,"statehood":28000,"##dner":28001,"##edging":28002,"marko":28003,"730":28004,"platt":28005,"##hane":28006,"paving":28007,"##iy":28008,"1738":28009,"abbess":28010,"impatience":28011,"limousine":28012,"nbl":28013,"##talk":28014,"441":28015,"lucille":28016,"mojo":28017,"nightfall":28018,"robbers":28019,"##nais":28020,"karel":28021,"brisk":28022,"calves":28023,"replicate":28024,"ascribed":28025,"telescopes":28026,"##olf":28027,"intimidated":28028,"##reen":28029,"ballast":28030,"specialization":28031,"##sit":28032,"aerodynamic":28033,"caliphate":28034,"rainer":28035,"visionary":28036,"##arded":28037,"epsilon":28038,"##aday":28039,"##onte":28040,"aggregation":28041,"auditory":28042,"boosted":28043,"reunification":28044,"kathmandu":28045,"loco":28046,"robyn":28047,"402":28048,"acknowledges":28049,"appointing":28050,"humanoid":28051,"newell":28052,"redeveloped":28053,"restraints":28054,"##tained":28055,"barbarians":28056,"chopper":28057,"1609":28058,"italiana":28059,"##lez":28060,"##lho":28061,"investigates":28062,"wrestlemania":28063,"##anies":28064,"##bib":28065,"690":28066,"##falls":28067,"creaked":28068,"dragoons":28069,"gravely":28070,"minions":28071,"stupidity":28072,"volley":28073,"##harat":28074,"##week":28075,"musik":28076,"##eries":28077,"##uously":28078,"fungal":28079,"massimo":28080,"semantics":28081,"malvern":28082,"##ahl":28083,"##pee":28084,"discourage":28085,"embryo":28086,"imperialism":28087,"1910s":28088,"profoundly":28089,"##ddled":28090,"jiangsu":28091,"sparkled":28092,"stat":28093,"##holz":28094,"sweatshirt":28095,"tobin":28096,"##iction":28097,"sneered":28098,"##cheon":28099,"##oit":28100,"brit":28101,"causal":28102,"smyth":28103,"##neuve":28104,"diffuse":28105,"perrin":28106,"silvio":28107,"##ipes":28108,"##recht":28109,"detonated":28110,"iqbal":28111,"selma":28112,"##nism":28113,"##zumi":28114,"roasted":28115,"##riders":28116,"tay":28117,"##ados":28118,"##mament":28119,"##mut":28120,"##rud":28121,"840":28122,"completes":28123,"nipples":28124,"cfa":28125,"flavour":28126,"hirsch":28127,"##laus":28128,"calderon":28129,"sneakers":28130,"moravian":28131,"##ksha":28132,"1622":28133,"rq":28134,"294":28135,"##imeters":28136,"bodo":28137,"##isance":28138,"##pre":28139,"##ronia":28140,"anatomical":28141,"excerpt":28142,"##lke":28143,"dh":28144,"kunst":28145,"##tablished":28146,"##scoe":28147,"biomass":28148,"panted":28149,"unharmed":28150,"gael":28151,"housemates":28152,"montpellier":28153,"##59":28154,"coa":28155,"rodents":28156,"tonic":28157,"hickory":28158,"singleton":28159,"##taro":28160,"451":28161,"1719":28162,"aldo":28163,"breaststroke":28164,"dempsey":28165,"och":28166,"rocco":28167,"##cuit":28168,"merton":28169,"dissemination":28170,"midsummer":28171,"serials":28172,"##idi":28
173,"haji":28174,"polynomials":28175,"##rdon":28176,"gs":28177,"enoch":28178,"prematurely":28179,"shutter":28180,"taunton":28181,"£3":28182,"##grating":28183,"##inates":28184,"archangel":28185,"harassed":28186,"##asco":28187,"326":28188,"archway":28189,"dazzling":28190,"##ecin":28191,"1736":28192,"sumo":28193,"wat":28194,"##kovich":28195,"1086":28196,"honneur":28197,"##ently":28198,"##nostic":28199,"##ttal":28200,"##idon":28201,"1605":28202,"403":28203,"1716":28204,"blogger":28205,"rents":28206,"##gnan":28207,"hires":28208,"##ikh":28209,"##dant":28210,"howie":28211,"##rons":28212,"handler":28213,"retracted":28214,"shocks":28215,"1632":28216,"arun":28217,"duluth":28218,"kepler":28219,"trumpeter":28220,"##lary":28221,"peeking":28222,"seasoned":28223,"trooper":28224,"##mara":28225,"laszlo":28226,"##iciencies":28227,"##rti":28228,"heterosexual":28229,"##inatory":28230,"##ssion":28231,"indira":28232,"jogging":28233,"##inga":28234,"##lism":28235,"beit":28236,"dissatisfaction":28237,"malice":28238,"##ately":28239,"nedra":28240,"peeling":28241,"##rgeon":28242,"47th":28243,"stadiums":28244,"475":28245,"vertigo":28246,"##ains":28247,"iced":28248,"restroom":28249,"##plify":28250,"##tub":28251,"illustrating":28252,"pear":28253,"##chner":28254,"##sibility":28255,"inorganic":28256,"rappers":28257,"receipts":28258,"watery":28259,"##kura":28260,"lucinda":28261,"##oulos":28262,"reintroduced":28263,"##8th":28264,"##tched":28265,"gracefully":28266,"saxons":28267,"nutritional":28268,"wastewater":28269,"rained":28270,"favourites":28271,"bedrock":28272,"fisted":28273,"hallways":28274,"likeness":28275,"upscale":28276,"##lateral":28277,"1580":28278,"blinds":28279,"prequel":28280,"##pps":28281,"##tama":28282,"deter":28283,"humiliating":28284,"restraining":28285,"tn":28286,"vents":28287,"1659":28288,"laundering":28289,"recess":28290,"rosary":28291,"tractors":28292,"coulter":28293,"federer":28294,"##ifiers":28295,"##plin":28296,"persistence":28297,"##quitable":28298,"geschichte":28299,"pendulum":28300,"quakers":28301,"##beam":28302,"bassett":28303,"pictorial":28304,"buffet":28305,"koln":28306,"##sitor":28307,"drills":28308,"reciprocal":28309,"shooters":28310,"##57":28311,"##cton":28312,"##tees":28313,"converge":28314,"pip":28315,"dmitri":28316,"donnelly":28317,"yamamoto":28318,"aqua":28319,"azores":28320,"demographics":28321,"hypnotic":28322,"spitfire":28323,"suspend":28324,"wryly":28325,"roderick":28326,"##rran":28327,"sebastien":28328,"##asurable":28329,"mavericks":28330,"##fles":28331,"##200":28332,"himalayan":28333,"prodigy":28334,"##iance":28335,"transvaal":28336,"demonstrators":28337,"handcuffs":28338,"dodged":28339,"mcnamara":28340,"sublime":28341,"1726":28342,"crazed":28343,"##efined":28344,"##till":28345,"ivo":28346,"pondered":28347,"reconciled":28348,"shrill":28349,"sava":28350,"##duk":28351,"bal":28352,"cad":28353,"heresy":28354,"jaipur":28355,"goran":28356,"##nished":28357,"341":28358,"lux":28359,"shelly":28360,"whitehall":28361,"##hre":28362,"israelis":28363,"peacekeeping":28364,"##wled":28365,"1703":28366,"demetrius":28367,"ousted":28368,"##arians":28369,"##zos":28370,"beale":28371,"anwar":28372,"backstroke":28373,"raged":28374,"shrinking":28375,"cremated":28376,"##yck":28377,"benign":28378,"towing":28379,"wadi":28380,"darmstadt":28381,"landfill":28382,"parana":28383,"soothe":28384,"colleen":28385,"sidewalks":28386,"mayfair":28387,"tumble":28388,"hepatitis":28389,"ferrer":28390,"superstructure":28391,"##gingly":28392,"##urse":28393,"##wee":28394,"anthropological":28395,"translators":28396,"##mies":28397
,"closeness":28398,"hooves":28399,"##pw":28400,"mondays":28401,"##roll":28402,"##vita":28403,"landscaping":28404,"##urized":28405,"purification":28406,"sock":28407,"thorns":28408,"thwarted":28409,"jalan":28410,"tiberius":28411,"##taka":28412,"saline":28413,"##rito":28414,"confidently":28415,"khyber":28416,"sculptors":28417,"##ij":28418,"brahms":28419,"hammersmith":28420,"inspectors":28421,"battista":28422,"fivb":28423,"fragmentation":28424,"hackney":28425,"##uls":28426,"arresting":28427,"exercising":28428,"antoinette":28429,"bedfordshire":28430,"##zily":28431,"dyed":28432,"##hema":28433,"1656":28434,"racetrack":28435,"variability":28436,"##tique":28437,"1655":28438,"austrians":28439,"deteriorating":28440,"madman":28441,"theorists":28442,"aix":28443,"lehman":28444,"weathered":28445,"1731":28446,"decreed":28447,"eruptions":28448,"1729":28449,"flaw":28450,"quinlan":28451,"sorbonne":28452,"flutes":28453,"nunez":28454,"1711":28455,"adored":28456,"downwards":28457,"fable":28458,"rasped":28459,"1712":28460,"moritz":28461,"mouthful":28462,"renegade":28463,"shivers":28464,"stunts":28465,"dysfunction":28466,"restrain":28467,"translit":28468,"327":28469,"pancakes":28470,"##avio":28471,"##cision":28472,"##tray":28473,"351":28474,"vial":28475,"##lden":28476,"bain":28477,"##maid":28478,"##oxide":28479,"chihuahua":28480,"malacca":28481,"vimes":28482,"##rba":28483,"##rnier":28484,"1664":28485,"donnie":28486,"plaques":28487,"##ually":28488,"337":28489,"bangs":28490,"floppy":28491,"huntsville":28492,"loretta":28493,"nikolay":28494,"##otte":28495,"eater":28496,"handgun":28497,"ubiquitous":28498,"##hett":28499,"eras":28500,"zodiac":28501,"1634":28502,"##omorphic":28503,"1820s":28504,"##zog":28505,"cochran":28506,"##bula":28507,"##lithic":28508,"warring":28509,"##rada":28510,"dalai":28511,"excused":28512,"blazers":28513,"mcconnell":28514,"reeling":28515,"bot":28516,"este":28517,"##abi":28518,"geese":28519,"hoax":28520,"taxon":28521,"##bla":28522,"guitarists":28523,"##icon":28524,"condemning":28525,"hunts":28526,"inversion":28527,"moffat":28528,"taekwondo":28529,"##lvis":28530,"1624":28531,"stammered":28532,"##rest":28533,"##rzy":28534,"sousa":28535,"fundraiser":28536,"marylebone":28537,"navigable":28538,"uptown":28539,"cabbage":28540,"daniela":28541,"salman":28542,"shitty":28543,"whimper":28544,"##kian":28545,"##utive":28546,"programmers":28547,"protections":28548,"rm":28549,"##rmi":28550,"##rued":28551,"forceful":28552,"##enes":28553,"fuss":28554,"##tao":28555,"##wash":28556,"brat":28557,"oppressive":28558,"reykjavik":28559,"spartak":28560,"ticking":28561,"##inkles":28562,"##kiewicz":28563,"adolph":28564,"horst":28565,"maui":28566,"protege":28567,"straighten":28568,"cpc":28569,"landau":28570,"concourse":28571,"clements":28572,"resultant":28573,"##ando":28574,"imaginative":28575,"joo":28576,"reactivated":28577,"##rem":28578,"##ffled":28579,"##uising":28580,"consultative":28581,"##guide":28582,"flop":28583,"kaitlyn":28584,"mergers":28585,"parenting":28586,"somber":28587,"##vron":28588,"supervise":28589,"vidhan":28590,"##imum":28591,"courtship":28592,"exemplified":28593,"harmonies":28594,"medallist":28595,"refining":28596,"##rrow":28597,"##ка":28598,"amara":28599,"##hum":28600,"780":28601,"goalscorer":28602,"sited":28603,"overshadowed":28604,"rohan":28605,"displeasure":28606,"secretive":28607,"multiplied":28608,"osman":28609,"##orth":28610,"engravings":28611,"padre":28612,"##kali":28613,"##veda":28614,"miniatures":28615,"mis":28616,"##yala":28617,"clap":28618,"pali":28619,"rook":28620,"##cana":28621,"1692":28622,
"57th":28623,"antennae":28624,"astro":28625,"oskar":28626,"1628":28627,"bulldog":28628,"crotch":28629,"hackett":28630,"yucatan":28631,"##sure":28632,"amplifiers":28633,"brno":28634,"ferrara":28635,"migrating":28636,"##gree":28637,"thanking":28638,"turing":28639,"##eza":28640,"mccann":28641,"ting":28642,"andersson":28643,"onslaught":28644,"gaines":28645,"ganga":28646,"incense":28647,"standardization":28648,"##mation":28649,"sentai":28650,"scuba":28651,"stuffing":28652,"turquoise":28653,"waivers":28654,"alloys":28655,"##vitt":28656,"regaining":28657,"vaults":28658,"##clops":28659,"##gizing":28660,"digger":28661,"furry":28662,"memorabilia":28663,"probing":28664,"##iad":28665,"payton":28666,"rec":28667,"deutschland":28668,"filippo":28669,"opaque":28670,"seamen":28671,"zenith":28672,"afrikaans":28673,"##filtration":28674,"disciplined":28675,"inspirational":28676,"##merie":28677,"banco":28678,"confuse":28679,"grafton":28680,"tod":28681,"##dgets":28682,"championed":28683,"simi":28684,"anomaly":28685,"biplane":28686,"##ceptive":28687,"electrode":28688,"##para":28689,"1697":28690,"cleavage":28691,"crossbow":28692,"swirl":28693,"informant":28694,"##lars":28695,"##osta":28696,"afi":28697,"bonfire":28698,"spec":28699,"##oux":28700,"lakeside":28701,"slump":28702,"##culus":28703,"##lais":28704,"##qvist":28705,"##rrigan":28706,"1016":28707,"facades":28708,"borg":28709,"inwardly":28710,"cervical":28711,"xl":28712,"pointedly":28713,"050":28714,"stabilization":28715,"##odon":28716,"chests":28717,"1699":28718,"hacked":28719,"ctv":28720,"orthogonal":28721,"suzy":28722,"##lastic":28723,"gaulle":28724,"jacobite":28725,"rearview":28726,"##cam":28727,"##erted":28728,"ashby":28729,"##drik":28730,"##igate":28731,"##mise":28732,"##zbek":28733,"affectionately":28734,"canine":28735,"disperse":28736,"latham":28737,"##istles":28738,"##ivar":28739,"spielberg":28740,"##orin":28741,"##idium":28742,"ezekiel":28743,"cid":28744,"##sg":28745,"durga":28746,"middletown":28747,"##cina":28748,"customized":28749,"frontiers":28750,"harden":28751,"##etano":28752,"##zzy":28753,"1604":28754,"bolsheviks":28755,"##66":28756,"coloration":28757,"yoko":28758,"##bedo":28759,"briefs":28760,"slabs":28761,"debra":28762,"liquidation":28763,"plumage":28764,"##oin":28765,"blossoms":28766,"dementia":28767,"subsidy":28768,"1611":28769,"proctor":28770,"relational":28771,"jerseys":28772,"parochial":28773,"ter":28774,"##ici":28775,"esa":28776,"peshawar":28777,"cavalier":28778,"loren":28779,"cpi":28780,"idiots":28781,"shamrock":28782,"1646":28783,"dutton":28784,"malabar":28785,"mustache":28786,"##endez":28787,"##ocytes":28788,"referencing":28789,"terminates":28790,"marche":28791,"yarmouth":28792,"##sop":28793,"acton":28794,"mated":28795,"seton":28796,"subtly":28797,"baptised":28798,"beige":28799,"extremes":28800,"jolted":28801,"kristina":28802,"telecast":28803,"##actic":28804,"safeguard":28805,"waldo":28806,"##baldi":28807,"##bular":28808,"endeavors":28809,"sloppy":28810,"subterranean":28811,"##ensburg":28812,"##itung":28813,"delicately":28814,"pigment":28815,"tq":28816,"##scu":28817,"1626":28818,"##ound":28819,"collisions":28820,"coveted":28821,"herds":28822,"##personal":28823,"##meister":28824,"##nberger":28825,"chopra":28826,"##ricting":28827,"abnormalities":28828,"defective":28829,"galician":28830,"lucie":28831,"##dilly":28832,"alligator":28833,"likened":28834,"##genase":28835,"burundi":28836,"clears":28837,"complexion":28838,"derelict":28839,"deafening":28840,"diablo":28841,"fingered":28842,"champaign":28843,"dogg":28844,"enlist":28845,"isotope":288
46,"labeling":28847,"mrna":28848,"##erre":28849,"brilliance":28850,"marvelous":28851,"##ayo":28852,"1652":28853,"crawley":28854,"ether":28855,"footed":28856,"dwellers":28857,"deserts":28858,"hamish":28859,"rubs":28860,"warlock":28861,"skimmed":28862,"##lizer":28863,"870":28864,"buick":28865,"embark":28866,"heraldic":28867,"irregularities":28868,"##ajan":28869,"kiara":28870,"##kulam":28871,"##ieg":28872,"antigen":28873,"kowalski":28874,"##lge":28875,"oakley":28876,"visitation":28877,"##mbit":28878,"vt":28879,"##suit":28880,"1570":28881,"murderers":28882,"##miento":28883,"##rites":28884,"chimneys":28885,"##sling":28886,"condemn":28887,"custer":28888,"exchequer":28889,"havre":28890,"##ghi":28891,"fluctuations":28892,"##rations":28893,"dfb":28894,"hendricks":28895,"vaccines":28896,"##tarian":28897,"nietzsche":28898,"biking":28899,"juicy":28900,"##duced":28901,"brooding":28902,"scrolling":28903,"selangor":28904,"##ragan":28905,"352":28906,"annum":28907,"boomed":28908,"seminole":28909,"sugarcane":28910,"##dna":28911,"departmental":28912,"dismissing":28913,"innsbruck":28914,"arteries":28915,"ashok":28916,"batavia":28917,"daze":28918,"kun":28919,"overtook":28920,"##rga":28921,"##tlan":28922,"beheaded":28923,"gaddafi":28924,"holm":28925,"electronically":28926,"faulty":28927,"galilee":28928,"fractures":28929,"kobayashi":28930,"##lized":28931,"gunmen":28932,"magma":28933,"aramaic":28934,"mala":28935,"eastenders":28936,"inference":28937,"messengers":28938,"bf":28939,"##qu":28940,"407":28941,"bathrooms":28942,"##vere":28943,"1658":28944,"flashbacks":28945,"ideally":28946,"misunderstood":28947,"##jali":28948,"##weather":28949,"mendez":28950,"##grounds":28951,"505":28952,"uncanny":28953,"##iii":28954,"1709":28955,"friendships":28956,"##nbc":28957,"sacrament":28958,"accommodated":28959,"reiterated":28960,"logistical":28961,"pebbles":28962,"thumped":28963,"##escence":28964,"administering":28965,"decrees":28966,"drafts":28967,"##flight":28968,"##cased":28969,"##tula":28970,"futuristic":28971,"picket":28972,"intimidation":28973,"winthrop":28974,"##fahan":28975,"interfered":28976,"339":28977,"afar":28978,"francoise":28979,"morally":28980,"uta":28981,"cochin":28982,"croft":28983,"dwarfs":28984,"##bruck":28985,"##dents":28986,"##nami":28987,"biker":28988,"##hner":28989,"##meral":28990,"nano":28991,"##isen":28992,"##ometric":28993,"##pres":28994,"##ан":28995,"brightened":28996,"meek":28997,"parcels":28998,"securely":28999,"gunners":29000,"##jhl":29001,"##zko":29002,"agile":29003,"hysteria":29004,"##lten":29005,"##rcus":29006,"bukit":29007,"champs":29008,"chevy":29009,"cuckoo":29010,"leith":29011,"sadler":29012,"theologians":29013,"welded":29014,"##section":29015,"1663":29016,"jj":29017,"plurality":29018,"xander":29019,"##rooms":29020,"##formed":29021,"shredded":29022,"temps":29023,"intimately":29024,"pau":29025,"tormented":29026,"##lok":29027,"##stellar":29028,"1618":29029,"charred":29030,"ems":29031,"essen":29032,"##mmel":29033,"alarms":29034,"spraying":29035,"ascot":29036,"blooms":29037,"twinkle":29038,"##abia":29039,"##apes":29040,"internment":29041,"obsidian":29042,"##chaft":29043,"snoop":29044,"##dav":29045,"##ooping":29046,"malibu":29047,"##tension":29048,"quiver":29049,"##itia":29050,"hays":29051,"mcintosh":29052,"travers":29053,"walsall":29054,"##ffie":29055,"1623":29056,"beverley":29057,"schwarz":29058,"plunging":29059,"structurally":29060,"m3":29061,"rosenthal":29062,"vikram":29063,"##tsk":29064,"770":29065,"ghz":29066,"##onda":29067,"##tiv":29068,"chalmers":29069,"groningen":29070,"pew":29071,"reckon":
29072,"unicef":29073,"##rvis":29074,"55th":29075,"##gni":29076,"1651":29077,"sulawesi":29078,"avila":29079,"cai":29080,"metaphysical":29081,"screwing":29082,"turbulence":29083,"##mberg":29084,"augusto":29085,"samba":29086,"56th":29087,"baffled":29088,"momentary":29089,"toxin":29090,"##urian":29091,"##wani":29092,"aachen":29093,"condoms":29094,"dali":29095,"steppe":29096,"##3d":29097,"##app":29098,"##oed":29099,"##year":29100,"adolescence":29101,"dauphin":29102,"electrically":29103,"inaccessible":29104,"microscopy":29105,"nikita":29106,"##ega":29107,"atv":29108,"##cel":29109,"##enter":29110,"##oles":29111,"##oteric":29112,"##ы":29113,"accountants":29114,"punishments":29115,"wrongly":29116,"bribes":29117,"adventurous":29118,"clinch":29119,"flinders":29120,"southland":29121,"##hem":29122,"##kata":29123,"gough":29124,"##ciency":29125,"lads":29126,"soared":29127,"##ה":29128,"undergoes":29129,"deformation":29130,"outlawed":29131,"rubbish":29132,"##arus":29133,"##mussen":29134,"##nidae":29135,"##rzburg":29136,"arcs":29137,"##ingdon":29138,"##tituted":29139,"1695":29140,"wheelbase":29141,"wheeling":29142,"bombardier":29143,"campground":29144,"zebra":29145,"##lices":29146,"##oj":29147,"##bain":29148,"lullaby":29149,"##ecure":29150,"donetsk":29151,"wylie":29152,"grenada":29153,"##arding":29154,"##ης":29155,"squinting":29156,"eireann":29157,"opposes":29158,"##andra":29159,"maximal":29160,"runes":29161,"##broken":29162,"##cuting":29163,"##iface":29164,"##ror":29165,"##rosis":29166,"additive":29167,"britney":29168,"adultery":29169,"triggering":29170,"##drome":29171,"detrimental":29172,"aarhus":29173,"containment":29174,"jc":29175,"swapped":29176,"vichy":29177,"##ioms":29178,"madly":29179,"##oric":29180,"##rag":29181,"brant":29182,"##ckey":29183,"##trix":29184,"1560":29185,"1612":29186,"broughton":29187,"rustling":29188,"##stems":29189,"##uder":29190,"asbestos":29191,"mentoring":29192,"##nivorous":29193,"finley":29194,"leaps":29195,"##isan":29196,"apical":29197,"pry":29198,"slits":29199,"substitutes":29200,"##dict":29201,"intuitive":29202,"fantasia":29203,"insistent":29204,"unreasonable":29205,"##igen":29206,"##vna":29207,"domed":29208,"hannover":29209,"margot":29210,"ponder":29211,"##zziness":29212,"impromptu":29213,"jian":29214,"lc":29215,"rampage":29216,"stemming":29217,"##eft":29218,"andrey":29219,"gerais":29220,"whichever":29221,"amnesia":29222,"appropriated":29223,"anzac":29224,"clicks":29225,"modifying":29226,"ultimatum":29227,"cambrian":29228,"maids":29229,"verve":29230,"yellowstone":29231,"##mbs":29232,"conservatoire":29233,"##scribe":29234,"adherence":29235,"dinners":29236,"spectra":29237,"imperfect":29238,"mysteriously":29239,"sidekick":29240,"tatar":29241,"tuba":29242,"##aks":29243,"##ifolia":29244,"distrust":29245,"##athan":29246,"##zle":29247,"c2":29248,"ronin":29249,"zac":29250,"##pse":29251,"celaena":29252,"instrumentalist":29253,"scents":29254,"skopje":29255,"##mbling":29256,"comical":29257,"compensated":29258,"vidal":29259,"condor":29260,"intersect":29261,"jingle":29262,"wavelengths":29263,"##urrent":29264,"mcqueen":29265,"##izzly":29266,"carp":29267,"weasel":29268,"422":29269,"kanye":29270,"militias":29271,"postdoctoral":29272,"eugen":29273,"gunslinger":29274,"##ɛ":29275,"faux":29276,"hospice":29277,"##for":29278,"appalled":29279,"derivation":29280,"dwarves":29281,"##elis":29282,"dilapidated":29283,"##folk":29284,"astoria":29285,"philology":29286,"##lwyn":29287,"##otho":29288,"##saka":29289,"inducing":29290,"philanthropy":29291,"##bf":29292,"##itative":29293,"geek":29294,"markedly":292
95,"sql":29296,"##yce":29297,"bessie":29298,"indices":29299,"rn":29300,"##flict":29301,"495":29302,"frowns":29303,"resolving":29304,"weightlifting":29305,"tugs":29306,"cleric":29307,"contentious":29308,"1653":29309,"mania":29310,"rms":29311,"##miya":29312,"##reate":29313,"##ruck":29314,"##tucket":29315,"bien":29316,"eels":29317,"marek":29318,"##ayton":29319,"##cence":29320,"discreet":29321,"unofficially":29322,"##ife":29323,"leaks":29324,"##bber":29325,"1705":29326,"332":29327,"dung":29328,"compressor":29329,"hillsborough":29330,"pandit":29331,"shillings":29332,"distal":29333,"##skin":29334,"381":29335,"##tat":29336,"##you":29337,"nosed":29338,"##nir":29339,"mangrove":29340,"undeveloped":29341,"##idia":29342,"textures":29343,"##inho":29344,"##500":29345,"##rise":29346,"ae":29347,"irritating":29348,"nay":29349,"amazingly":29350,"bancroft":29351,"apologetic":29352,"compassionate":29353,"kata":29354,"symphonies":29355,"##lovic":29356,"airspace":29357,"##lch":29358,"930":29359,"gifford":29360,"precautions":29361,"fulfillment":29362,"sevilla":29363,"vulgar":29364,"martinique":29365,"##urities":29366,"looting":29367,"piccolo":29368,"tidy":29369,"##dermott":29370,"quadrant":29371,"armchair":29372,"incomes":29373,"mathematicians":29374,"stampede":29375,"nilsson":29376,"##inking":29377,"##scan":29378,"foo":29379,"quarterfinal":29380,"##ostal":29381,"shang":29382,"shouldered":29383,"squirrels":29384,"##owe":29385,"344":29386,"vinegar":29387,"##bner":29388,"##rchy":29389,"##systems":29390,"delaying":29391,"##trics":29392,"ars":29393,"dwyer":29394,"rhapsody":29395,"sponsoring":29396,"##gration":29397,"bipolar":29398,"cinder":29399,"starters":29400,"##olio":29401,"##urst":29402,"421":29403,"signage":29404,"##nty":29405,"aground":29406,"figurative":29407,"mons":29408,"acquaintances":29409,"duets":29410,"erroneously":29411,"soyuz":29412,"elliptic":29413,"recreated":29414,"##cultural":29415,"##quette":29416,"##ssed":29417,"##tma":29418,"##zcz":29419,"moderator":29420,"scares":29421,"##itaire":29422,"##stones":29423,"##udence":29424,"juniper":29425,"sighting":29426,"##just":29427,"##nsen":29428,"britten":29429,"calabria":29430,"ry":29431,"bop":29432,"cramer":29433,"forsyth":29434,"stillness":29435,"##л":29436,"airmen":29437,"gathers":29438,"unfit":29439,"##umber":29440,"##upt":29441,"taunting":29442,"##rip":29443,"seeker":29444,"streamlined":29445,"##bution":29446,"holster":29447,"schumann":29448,"tread":29449,"vox":29450,"##gano":29451,"##onzo":29452,"strive":29453,"dil":29454,"reforming":29455,"covent":29456,"newbury":29457,"predicting":29458,"##orro":29459,"decorate":29460,"tre":29461,"##puted":29462,"andover":29463,"ie":29464,"asahi":29465,"dept":29466,"dunkirk":29467,"gills":29468,"##tori":29469,"buren":29470,"huskies":29471,"##stis":29472,"##stov":29473,"abstracts":29474,"bets":29475,"loosen":29476,"##opa":29477,"1682":29478,"yearning":29479,"##glio":29480,"##sir":29481,"berman":29482,"effortlessly":29483,"enamel":29484,"napoli":29485,"persist":29486,"##peration":29487,"##uez":29488,"attache":29489,"elisa":29490,"b1":29491,"invitations":29492,"##kic":29493,"accelerating":29494,"reindeer":29495,"boardwalk":29496,"clutches":29497,"nelly":29498,"polka":29499,"starbucks":29500,"##kei":29501,"adamant":29502,"huey":29503,"lough":29504,"unbroken":29505,"adventurer":29506,"embroidery":29507,"inspecting":29508,"stanza":29509,"##ducted":29510,"naia":29511,"taluka":29512,"##pone":29513,"##roids":29514,"chases":29515,"deprivation":29516,"florian":29517,"##jing":29518,"##ppet":29519,"earthly":29520,"##lib":29521,"
##ssee":29522,"colossal":29523,"foreigner":29524,"vet":29525,"freaks":29526,"patrice":29527,"rosewood":29528,"triassic":29529,"upstate":29530,"##pkins":29531,"dominates":29532,"ata":29533,"chants":29534,"ks":29535,"vo":29536,"##400":29537,"##bley":29538,"##raya":29539,"##rmed":29540,"555":29541,"agra":29542,"infiltrate":29543,"##ailing":29544,"##ilation":29545,"##tzer":29546,"##uppe":29547,"##werk":29548,"binoculars":29549,"enthusiast":29550,"fujian":29551,"squeak":29552,"##avs":29553,"abolitionist":29554,"almeida":29555,"boredom":29556,"hampstead":29557,"marsden":29558,"rations":29559,"##ands":29560,"inflated":29561,"334":29562,"bonuses":29563,"rosalie":29564,"patna":29565,"##rco":29566,"329":29567,"detachments":29568,"penitentiary":29569,"54th":29570,"flourishing":29571,"woolf":29572,"##dion":29573,"##etched":29574,"papyrus":29575,"##lster":29576,"##nsor":29577,"##toy":29578,"bobbed":29579,"dismounted":29580,"endelle":29581,"inhuman":29582,"motorola":29583,"tbs":29584,"wince":29585,"wreath":29586,"##ticus":29587,"hideout":29588,"inspections":29589,"sanjay":29590,"disgrace":29591,"infused":29592,"pudding":29593,"stalks":29594,"##urbed":29595,"arsenic":29596,"leases":29597,"##hyl":29598,"##rrard":29599,"collarbone":29600,"##waite":29601,"##wil":29602,"dowry":29603,"##bant":29604,"##edance":29605,"genealogical":29606,"nitrate":29607,"salamanca":29608,"scandals":29609,"thyroid":29610,"necessitated":29611,"##!":29612,"##\"":29613,"###":29614,"##$":29615,"##%":29616,"##&":29617,"##'":29618,"##(":29619,"##)":29620,"##*":29621,"##+":29622,"##,":29623,"##-":29624,"##.":29625,"##/":29626,"##:":29627,"##;":29628,"##<":29629,"##=":29630,"##>":29631,"##?":29632,"##@":29633,"##[":29634,"##\\":29635,"##]":29636,"##^":29637,"##_":29638,"##`":29639,"##{":29640,"##|":29641,"##}":29642,"##~":29643,"##¡":29644,"##¢":29645,"##£":29646,"##¤":29647,"##¥":29648,"##¦":29649,"##§":29650,"##¨":29651,"##©":29652,"##ª":29653,"##«":29654,"##¬":29655,"##®":29656,"##±":29657,"##´":29658,"##µ":29659,"##¶":29660,"##·":29661,"##º":29662,"##»":29663,"##¼":29664,"##¾":29665,"##¿":29666,"##æ":29667,"##ð":29668,"##÷":29669,"##þ":29670,"##đ":29671,"##ħ":29672,"##ŋ":29673,"##œ":29674,"##ƒ":29675,"##ɐ":29676,"##ɑ":29677,"##ɒ":29678,"##ɔ":29679,"##ɕ":29680,"##ə":29681,"##ɡ":29682,"##ɣ":29683,"##ɨ":29684,"##ɪ":29685,"##ɫ":29686,"##ɬ":29687,"##ɯ":29688,"##ɲ":29689,"##ɴ":29690,"##ɹ":29691,"##ɾ":29692,"##ʀ":29693,"##ʁ":29694,"##ʂ":29695,"##ʃ":29696,"##ʉ":29697,"##ʊ":29698,"##ʋ":29699,"##ʌ":29700,"##ʎ":29701,"##ʐ":29702,"##ʑ":29703,"##ʒ":29704,"##ʔ":29705,"##ʰ":29706,"##ʲ":29707,"##ʳ":29708,"##ʷ":29709,"##ʸ":29710,"##ʻ":29711,"##ʼ":29712,"##ʾ":29713,"##ʿ":29714,"##ˈ":29715,"##ˡ":29716,"##ˢ":29717,"##ˣ":29718,"##ˤ":29719,"##β":29720,"##γ":29721,"##δ":29722,"##ε":29723,"##ζ":29724,"##θ":29725,"##κ":29726,"##λ":29727,"##μ":29728,"##ξ":29729,"##ο":29730,"##π":29731,"##ρ":29732,"##σ":29733,"##τ":29734,"##υ":29735,"##φ":29736,"##χ":29737,"##ψ":29738,"##ω":29739,"##б":29740,"##г":29741,"##д":29742,"##ж":29743,"##з":29744,"##м":29745,"##п":29746,"##с":29747,"##у":29748,"##ф":29749,"##х":29750,"##ц":29751,"##ч":29752,"##ш":29753,"##щ":29754,"##ъ":29755,"##э":29756,"##ю":29757,"##ђ":29758,"##є":29759,"##і":29760,"##ј":29761,"##љ":29762,"##њ":29763,"##ћ":29764,"##ӏ":29765,"##ա":29766,"##բ":29767,"##գ":29768,"##դ":29769,"##ե":29770,"##թ":29771,"##ի":29772,"##լ":29773,"##կ":29774,"##հ":29775,"##մ":29776,"##յ":29777,"##ն":29778,"##ո":29779,"##պ":29780,"##ս":29781,"##վ":29782,"##տ":29783,"##ր":29784,"##ւ":29785,"##ք":29786,"##־":29787,"##א":29788,"##
ב":29789,"##ג":29790,"##ד":29791,"##ו":29792,"##ז":29793,"##ח":29794,"##ט":29795,"##י":29796,"##ך":29797,"##כ":29798,"##ל":29799,"##ם":29800,"##מ":29801,"##ן":29802,"##נ":29803,"##ס":29804,"##ע":29805,"##ף":29806,"##פ":29807,"##ץ":29808,"##צ":29809,"##ק":29810,"##ר":29811,"##ש":29812,"##ת":29813,"##،":29814,"##ء":29815,"##ب":29816,"##ت":29817,"##ث":29818,"##ج":29819,"##ح":29820,"##خ":29821,"##ذ":29822,"##ز":29823,"##س":29824,"##ش":29825,"##ص":29826,"##ض":29827,"##ط":29828,"##ظ":29829,"##ع":29830,"##غ":29831,"##ـ":29832,"##ف":29833,"##ق":29834,"##ك":29835,"##و":29836,"##ى":29837,"##ٹ":29838,"##پ":29839,"##چ":29840,"##ک":29841,"##گ":29842,"##ں":29843,"##ھ":29844,"##ہ":29845,"##ے":29846,"##अ":29847,"##आ":29848,"##उ":29849,"##ए":29850,"##क":29851,"##ख":29852,"##ग":29853,"##च":29854,"##ज":29855,"##ट":29856,"##ड":29857,"##ण":29858,"##त":29859,"##थ":29860,"##द":29861,"##ध":29862,"##न":29863,"##प":29864,"##ब":29865,"##भ":29866,"##म":29867,"##य":29868,"##र":29869,"##ल":29870,"##व":29871,"##श":29872,"##ष":29873,"##स":29874,"##ह":29875,"##ा":29876,"##ि":29877,"##ी":29878,"##ो":29879,"##।":29880,"##॥":29881,"##ং":29882,"##অ":29883,"##আ":29884,"##ই":29885,"##উ":29886,"##এ":29887,"##ও":29888,"##ক":29889,"##খ":29890,"##গ":29891,"##চ":29892,"##ছ":29893,"##জ":29894,"##ট":29895,"##ড":29896,"##ণ":29897,"##ত":29898,"##থ":29899,"##দ":29900,"##ধ":29901,"##ন":29902,"##প":29903,"##ব":29904,"##ভ":29905,"##ম":29906,"##য":29907,"##র":29908,"##ল":29909,"##শ":29910,"##ষ":29911,"##স":29912,"##হ":29913,"##া":29914,"##ি":29915,"##ী":29916,"##ে":29917,"##க":29918,"##ச":29919,"##ட":29920,"##த":29921,"##ந":29922,"##ன":29923,"##ப":29924,"##ம":29925,"##ய":29926,"##ர":29927,"##ல":29928,"##ள":29929,"##வ":29930,"##ா":29931,"##ி":29932,"##ு":29933,"##ே":29934,"##ை":29935,"##ನ":29936,"##ರ":29937,"##ಾ":29938,"##ක":29939,"##ය":29940,"##ර":29941,"##ල":29942,"##ව":29943,"##ා":29944,"##ก":29945,"##ง":29946,"##ต":29947,"##ท":29948,"##น":29949,"##พ":29950,"##ม":29951,"##ย":29952,"##ร":29953,"##ล":29954,"##ว":29955,"##ส":29956,"##อ":29957,"##า":29958,"##เ":29959,"##་":29960,"##།":29961,"##ག":29962,"##ང":29963,"##ད":29964,"##ན":29965,"##པ":29966,"##བ":29967,"##མ":29968,"##འ":29969,"##ར":29970,"##ལ":29971,"##ས":29972,"##မ":29973,"##ა":29974,"##ბ":29975,"##გ":29976,"##დ":29977,"##ე":29978,"##ვ":29979,"##თ":29980,"##ი":29981,"##კ":29982,"##ლ":29983,"##მ":29984,"##ნ":29985,"##ო":29986,"##რ":29987,"##ს":29988,"##ტ":29989,"##უ":29990,"##ᄀ":29991,"##ᄂ":29992,"##ᄃ":29993,"##ᄅ":29994,"##ᄆ":29995,"##ᄇ":29996,"##ᄉ":29997,"##ᄊ":29998,"##ᄋ":29999,"##ᄌ":30000,"##ᄎ":30001,"##ᄏ":30002,"##ᄐ":30003,"##ᄑ":30004,"##ᄒ":30005,"##ᅡ":30006,"##ᅢ":30007,"##ᅥ":30008,"##ᅦ":30009,"##ᅧ":30010,"##ᅩ":30011,"##ᅪ":30012,"##ᅭ":30013,"##ᅮ":30014,"##ᅯ":30015,"##ᅲ":30016,"##ᅳ":30017,"##ᅴ":30018,"##ᅵ":30019,"##ᆨ":30020,"##ᆫ":30021,"##ᆯ":30022,"##ᆷ":30023,"##ᆸ":30024,"##ᆼ":30025,"##ᴬ":30026,"##ᴮ":30027,"##ᴰ":30028,"##ᴵ":30029,"##ᴺ":30030,"##ᵀ":30031,"##ᵃ":30032,"##ᵇ":30033,"##ᵈ":30034,"##ᵉ":30035,"##ᵍ":30036,"##ᵏ":30037,"##ᵐ":30038,"##ᵒ":30039,"##ᵖ":30040,"##ᵗ":30041,"##ᵘ":30042,"##ᵣ":30043,"##ᵤ":30044,"##ᵥ":30045,"##ᶜ":30046,"##ᶠ":30047,"##‐":30048,"##‑":30049,"##‒":30050,"##–":30051,"##—":30052,"##―":30053,"##‖":30054,"##‘":30055,"##’":30056,"##‚":30057,"##“":30058,"##”":30059,"##„":30060,"##†":30061,"##‡":30062,"##•":30063,"##…":30064,"##‰":30065,"##′":30066,"##″":30067,"##›":30068,"##‿":30069,"##⁄":30070,"##⁰":30071,"##ⁱ":30072,"##⁴":30073,"##⁵":30074,"##⁶":30075,"##⁷":30076,"##⁸":30077,"##⁹":30078,"##⁻":30079,"##ⁿ":30080,"##₅":30081,"##₆":30082,"##₇":30083,"##₈":30084,"##₉":
30085,"##₊":30086,"##₍":30087,"##₎":30088,"##ₐ":30089,"##ₑ":30090,"##ₒ":30091,"##ₓ":30092,"##ₕ":30093,"##ₖ":30094,"##ₗ":30095,"##ₘ":30096,"##ₚ":30097,"##ₛ":30098,"##ₜ":30099,"##₤":30100,"##₩":30101,"##€":30102,"##₱":30103,"##₹":30104,"##ℓ":30105,"##№":30106,"##ℝ":30107,"##™":30108,"##⅓":30109,"##⅔":30110,"##←":30111,"##↑":30112,"##→":30113,"##↓":30114,"##↔":30115,"##↦":30116,"##⇄":30117,"##⇌":30118,"##⇒":30119,"##∂":30120,"##∅":30121,"##∆":30122,"##∇":30123,"##∈":30124,"##∗":30125,"##∘":30126,"##√":30127,"##∞":30128,"##∧":30129,"##∨":30130,"##∩":30131,"##∪":30132,"##≈":30133,"##≡":30134,"##≤":30135,"##≥":30136,"##⊂":30137,"##⊆":30138,"##⊕":30139,"##⊗":30140,"##⋅":30141,"##─":30142,"##│":30143,"##■":30144,"##▪":30145,"##●":30146,"##★":30147,"##☆":30148,"##☉":30149,"##♠":30150,"##♣":30151,"##♥":30152,"##♦":30153,"##♯":30154,"##⟨":30155,"##⟩":30156,"##ⱼ":30157,"##⺩":30158,"##⺼":30159,"##⽥":30160,"##、":30161,"##。":30162,"##〈":30163,"##〉":30164,"##《":30165,"##》":30166,"##「":30167,"##」":30168,"##『":30169,"##』":30170,"##〜":30171,"##あ":30172,"##い":30173,"##う":30174,"##え":30175,"##お":30176,"##か":30177,"##き":30178,"##く":30179,"##け":30180,"##こ":30181,"##さ":30182,"##し":30183,"##す":30184,"##せ":30185,"##そ":30186,"##た":30187,"##ち":30188,"##っ":30189,"##つ":30190,"##て":30191,"##と":30192,"##な":30193,"##に":30194,"##ぬ":30195,"##ね":30196,"##の":30197,"##は":30198,"##ひ":30199,"##ふ":30200,"##へ":30201,"##ほ":30202,"##ま":30203,"##み":30204,"##む":30205,"##め":30206,"##も":30207,"##や":30208,"##ゆ":30209,"##よ":30210,"##ら":30211,"##り":30212,"##る":30213,"##れ":30214,"##ろ":30215,"##を":30216,"##ん":30217,"##ァ":30218,"##ア":30219,"##ィ":30220,"##イ":30221,"##ウ":30222,"##ェ":30223,"##エ":30224,"##オ":30225,"##カ":30226,"##キ":30227,"##ク":30228,"##ケ":30229,"##コ":30230,"##サ":30231,"##シ":30232,"##ス":30233,"##セ":30234,"##タ":30235,"##チ":30236,"##ッ":30237,"##ツ":30238,"##テ":30239,"##ト":30240,"##ナ":30241,"##ニ":30242,"##ノ":30243,"##ハ":30244,"##ヒ":30245,"##フ":30246,"##ヘ":30247,"##ホ":30248,"##マ":30249,"##ミ":30250,"##ム":30251,"##メ":30252,"##モ":30253,"##ャ":30254,"##ュ":30255,"##ョ":30256,"##ラ":30257,"##リ":30258,"##ル":30259,"##レ":30260,"##ロ":30261,"##ワ":30262,"##ン":30263,"##・":30264,"##ー":30265,"##一":30266,"##三":30267,"##上":30268,"##下":30269,"##不":30270,"##世":30271,"##中":30272,"##主":30273,"##久":30274,"##之":30275,"##也":30276,"##事":30277,"##二":30278,"##五":30279,"##井":30280,"##京":30281,"##人":30282,"##亻":30283,"##仁":30284,"##介":30285,"##代":30286,"##仮":30287,"##伊":30288,"##会":30289,"##佐":30290,"##侍":30291,"##保":30292,"##信":30293,"##健":30294,"##元":30295,"##光":30296,"##八":30297,"##公":30298,"##内":30299,"##出":30300,"##分":30301,"##前":30302,"##劉":30303,"##力":30304,"##加":30305,"##勝":30306,"##北":30307,"##区":30308,"##十":30309,"##千":30310,"##南":30311,"##博":30312,"##原":30313,"##口":30314,"##古":30315,"##史":30316,"##司":30317,"##合":30318,"##吉":30319,"##同":30320,"##名":30321,"##和":30322,"##囗":30323,"##四":30324,"##国":30325,"##國":30326,"##土":30327,"##地":30328,"##坂":30329,"##城":30330,"##堂":30331,"##場":30332,"##士":30333,"##夏":30334,"##外":30335,"##大":30336,"##天":30337,"##太":30338,"##夫":30339,"##奈":30340,"##女":30341,"##子":30342,"##学":30343,"##宀":30344,"##宇":30345,"##安":30346,"##宗":30347,"##定":30348,"##宣":30349,"##宮":30350,"##家":30351,"##宿":30352,"##寺":30353,"##將":30354,"##小":30355,"##尚":30356,"##山":30357,"##岡":30358,"##島":30359,"##崎":30360,"##川":30361,"##州":30362,"##巿":30363,"##帝":30364,"##平":30365,"##年":30366,"##幸":30367,"##广":30368,"##弘":30369,"##張":30370,"##彳":30371,"##後":30372,"##御":30373,"##德":30374,"##心":30375,"##忄":30376,"##志":30377,"##忠":30378,"##愛":30379,"##成":30380,"##我":303
81,"##戦":30382,"##戸":30383,"##手":30384,"##扌":30385,"##政":30386,"##文":30387,"##新":30388,"##方":30389,"##日":30390,"##明":30391,"##星":30392,"##春":30393,"##昭":30394,"##智":30395,"##曲":30396,"##書":30397,"##月":30398,"##有":30399,"##朝":30400,"##木":30401,"##本":30402,"##李":30403,"##村":30404,"##東":30405,"##松":30406,"##林":30407,"##森":30408,"##楊":30409,"##樹":30410,"##橋":30411,"##歌":30412,"##止":30413,"##正":30414,"##武":30415,"##比":30416,"##氏":30417,"##民":30418,"##水":30419,"##氵":30420,"##氷":30421,"##永":30422,"##江":30423,"##沢":30424,"##河":30425,"##治":30426,"##法":30427,"##海":30428,"##清":30429,"##漢":30430,"##瀬":30431,"##火":30432,"##版":30433,"##犬":30434,"##王":30435,"##生":30436,"##田":30437,"##男":30438,"##疒":30439,"##発":30440,"##白":30441,"##的":30442,"##皇":30443,"##目":30444,"##相":30445,"##省":30446,"##真":30447,"##石":30448,"##示":30449,"##社":30450,"##神":30451,"##福":30452,"##禾":30453,"##秀":30454,"##秋":30455,"##空":30456,"##立":30457,"##章":30458,"##竹":30459,"##糹":30460,"##美":30461,"##義":30462,"##耳":30463,"##良":30464,"##艹":30465,"##花":30466,"##英":30467,"##華":30468,"##葉":30469,"##藤":30470,"##行":30471,"##街":30472,"##西":30473,"##見":30474,"##訁":30475,"##語":30476,"##谷":30477,"##貝":30478,"##貴":30479,"##車":30480,"##軍":30481,"##辶":30482,"##道":30483,"##郎":30484,"##郡":30485,"##部":30486,"##都":30487,"##里":30488,"##野":30489,"##金":30490,"##鈴":30491,"##镇":30492,"##長":30493,"##門":30494,"##間":30495,"##阝":30496,"##阿":30497,"##陳":30498,"##陽":30499,"##雄":30500,"##青":30501,"##面":30502,"##風":30503,"##食":30504,"##香":30505,"##馬":30506,"##高":30507,"##龍":30508,"##龸":30509,"##fi":30510,"##fl":30511,"##!":30512,"##(":30513,"##)":30514,"##,":30515,"##-":30516,"##.":30517,"##/":30518,"##:":30519,"##?":30520,"##~":30521}}} \ No newline at end of file diff --git a/extras/BLIP/models/bert_tokenizer/tokenizer_config.json b/extras/BLIP/models/bert_tokenizer/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..a661b1a138dac6dc5590367402d100765010ffd6 --- /dev/null +++ b/extras/BLIP/models/bert_tokenizer/tokenizer_config.json @@ -0,0 +1,3 @@ +{ + "do_lower_case": true +} diff --git a/extras/BLIP/models/bert_tokenizer/vocab.txt b/extras/BLIP/models/bert_tokenizer/vocab.txt new file mode 100644 index 0000000000000000000000000000000000000000..fb140275c155a9c7c5a3b3e0e77a9e839594a938 --- /dev/null +++ b/extras/BLIP/models/bert_tokenizer/vocab.txt @@ -0,0 +1,30522 @@ +[PAD] +[unused0] +[unused1] +[unused2] +[unused3] +[unused4] +[unused5] +[unused6] +[unused7] +[unused8] +[unused9] +[unused10] +[unused11] +[unused12] +[unused13] +[unused14] +[unused15] +[unused16] +[unused17] +[unused18] +[unused19] +[unused20] +[unused21] +[unused22] +[unused23] +[unused24] +[unused25] +[unused26] +[unused27] +[unused28] +[unused29] +[unused30] +[unused31] +[unused32] +[unused33] +[unused34] +[unused35] +[unused36] +[unused37] +[unused38] +[unused39] +[unused40] +[unused41] +[unused42] +[unused43] +[unused44] +[unused45] +[unused46] +[unused47] +[unused48] +[unused49] +[unused50] +[unused51] +[unused52] +[unused53] +[unused54] +[unused55] +[unused56] +[unused57] +[unused58] +[unused59] +[unused60] +[unused61] +[unused62] +[unused63] +[unused64] +[unused65] +[unused66] +[unused67] +[unused68] +[unused69] +[unused70] +[unused71] +[unused72] +[unused73] +[unused74] +[unused75] +[unused76] +[unused77] +[unused78] +[unused79] +[unused80] +[unused81] +[unused82] +[unused83] +[unused84] +[unused85] +[unused86] +[unused87] +[unused88] +[unused89] +[unused90] +[unused91] +[unused92] +[unused93] +[unused94] +[unused95] +[unused96] +[unused97] 
+[unused98] +[UNK] +[CLS] +[SEP] +[MASK] +[unused99] +[unused100] +[unused101] +[unused102] +[unused103] +[unused104] +[unused105] +[unused106] +[unused107] +[unused108] +[unused109] +[unused110] +[unused111] +[unused112] +[unused113] +[unused114] +[unused115] +[unused116] +[unused117] +[unused118] +[unused119] +[unused120] +[unused121] +[unused122] +[unused123] +[unused124] +[unused125] +[unused126] +[unused127] +[unused128] +[unused129] +[unused130] +[unused131] +[unused132] +[unused133] +[unused134] +[unused135] +[unused136] +[unused137] +[unused138] +[unused139] +[unused140] +[unused141] +[unused142] +[unused143] +[unused144] +[unused145] +[unused146] +[unused147] +[unused148] +[unused149] +[unused150] +[unused151] +[unused152] +[unused153] +[unused154] +[unused155] +[unused156] +[unused157] +[unused158] +[unused159] +[unused160] +[unused161] +[unused162] +[unused163] +[unused164] +[unused165] +[unused166] +[unused167] +[unused168] +[unused169] +[unused170] +[unused171] +[unused172] +[unused173] +[unused174] +[unused175] +[unused176] +[unused177] +[unused178] +[unused179] +[unused180] +[unused181] +[unused182] +[unused183] +[unused184] +[unused185] +[unused186] +[unused187] +[unused188] +[unused189] +[unused190] +[unused191] +[unused192] +[unused193] +[unused194] +[unused195] +[unused196] +[unused197] +[unused198] +[unused199] +[unused200] +[unused201] +[unused202] +[unused203] +[unused204] +[unused205] +[unused206] +[unused207] +[unused208] +[unused209] +[unused210] +[unused211] +[unused212] +[unused213] +[unused214] +[unused215] +[unused216] +[unused217] +[unused218] +[unused219] +[unused220] +[unused221] +[unused222] +[unused223] +[unused224] +[unused225] +[unused226] +[unused227] +[unused228] +[unused229] +[unused230] +[unused231] +[unused232] +[unused233] +[unused234] +[unused235] +[unused236] +[unused237] +[unused238] +[unused239] +[unused240] +[unused241] +[unused242] +[unused243] +[unused244] +[unused245] +[unused246] +[unused247] +[unused248] +[unused249] +[unused250] +[unused251] +[unused252] +[unused253] +[unused254] +[unused255] +[unused256] +[unused257] +[unused258] +[unused259] +[unused260] +[unused261] +[unused262] +[unused263] +[unused264] +[unused265] +[unused266] +[unused267] +[unused268] +[unused269] +[unused270] +[unused271] +[unused272] +[unused273] +[unused274] +[unused275] +[unused276] +[unused277] +[unused278] +[unused279] +[unused280] +[unused281] +[unused282] +[unused283] +[unused284] +[unused285] +[unused286] +[unused287] +[unused288] +[unused289] +[unused290] +[unused291] +[unused292] +[unused293] +[unused294] +[unused295] +[unused296] +[unused297] +[unused298] +[unused299] +[unused300] +[unused301] +[unused302] +[unused303] +[unused304] +[unused305] +[unused306] +[unused307] +[unused308] +[unused309] +[unused310] +[unused311] +[unused312] +[unused313] +[unused314] +[unused315] +[unused316] +[unused317] +[unused318] +[unused319] +[unused320] +[unused321] +[unused322] +[unused323] +[unused324] +[unused325] +[unused326] +[unused327] +[unused328] +[unused329] +[unused330] +[unused331] +[unused332] +[unused333] +[unused334] +[unused335] +[unused336] +[unused337] +[unused338] +[unused339] +[unused340] +[unused341] +[unused342] +[unused343] +[unused344] +[unused345] +[unused346] +[unused347] +[unused348] +[unused349] +[unused350] +[unused351] +[unused352] +[unused353] +[unused354] +[unused355] +[unused356] +[unused357] +[unused358] +[unused359] +[unused360] +[unused361] +[unused362] +[unused363] +[unused364] +[unused365] +[unused366] +[unused367] +[unused368] 
+[unused369] +[unused370] +[unused371] +[unused372] +[unused373] +[unused374] +[unused375] +[unused376] +[unused377] +[unused378] +[unused379] +[unused380] +[unused381] +[unused382] +[unused383] +[unused384] +[unused385] +[unused386] +[unused387] +[unused388] +[unused389] +[unused390] +[unused391] +[unused392] +[unused393] +[unused394] +[unused395] +[unused396] +[unused397] +[unused398] +[unused399] +[unused400] +[unused401] +[unused402] +[unused403] +[unused404] +[unused405] +[unused406] +[unused407] +[unused408] +[unused409] +[unused410] +[unused411] +[unused412] +[unused413] +[unused414] +[unused415] +[unused416] +[unused417] +[unused418] +[unused419] +[unused420] +[unused421] +[unused422] +[unused423] +[unused424] +[unused425] +[unused426] +[unused427] +[unused428] +[unused429] +[unused430] +[unused431] +[unused432] +[unused433] +[unused434] +[unused435] +[unused436] +[unused437] +[unused438] +[unused439] +[unused440] +[unused441] +[unused442] +[unused443] +[unused444] +[unused445] +[unused446] +[unused447] +[unused448] +[unused449] +[unused450] +[unused451] +[unused452] +[unused453] +[unused454] +[unused455] +[unused456] +[unused457] +[unused458] +[unused459] +[unused460] +[unused461] +[unused462] +[unused463] +[unused464] +[unused465] +[unused466] +[unused467] +[unused468] +[unused469] +[unused470] +[unused471] +[unused472] +[unused473] +[unused474] +[unused475] +[unused476] +[unused477] +[unused478] +[unused479] +[unused480] +[unused481] +[unused482] +[unused483] +[unused484] +[unused485] +[unused486] +[unused487] +[unused488] +[unused489] +[unused490] +[unused491] +[unused492] +[unused493] +[unused494] +[unused495] +[unused496] +[unused497] +[unused498] +[unused499] +[unused500] +[unused501] +[unused502] +[unused503] +[unused504] +[unused505] +[unused506] +[unused507] +[unused508] +[unused509] +[unused510] +[unused511] +[unused512] +[unused513] +[unused514] +[unused515] +[unused516] +[unused517] +[unused518] +[unused519] +[unused520] +[unused521] +[unused522] +[unused523] +[unused524] +[unused525] +[unused526] +[unused527] +[unused528] +[unused529] +[unused530] +[unused531] +[unused532] +[unused533] +[unused534] +[unused535] +[unused536] +[unused537] +[unused538] +[unused539] +[unused540] +[unused541] +[unused542] +[unused543] +[unused544] +[unused545] +[unused546] +[unused547] +[unused548] +[unused549] +[unused550] +[unused551] +[unused552] +[unused553] +[unused554] +[unused555] +[unused556] +[unused557] +[unused558] +[unused559] +[unused560] +[unused561] +[unused562] +[unused563] +[unused564] +[unused565] +[unused566] +[unused567] +[unused568] +[unused569] +[unused570] +[unused571] +[unused572] +[unused573] +[unused574] +[unused575] +[unused576] +[unused577] +[unused578] +[unused579] +[unused580] +[unused581] +[unused582] +[unused583] +[unused584] +[unused585] +[unused586] +[unused587] +[unused588] +[unused589] +[unused590] +[unused591] +[unused592] +[unused593] +[unused594] +[unused595] +[unused596] +[unused597] +[unused598] +[unused599] +[unused600] +[unused601] +[unused602] +[unused603] +[unused604] +[unused605] +[unused606] +[unused607] +[unused608] +[unused609] +[unused610] +[unused611] +[unused612] +[unused613] +[unused614] +[unused615] +[unused616] +[unused617] +[unused618] +[unused619] +[unused620] +[unused621] +[unused622] +[unused623] +[unused624] +[unused625] +[unused626] +[unused627] +[unused628] +[unused629] +[unused630] +[unused631] +[unused632] +[unused633] +[unused634] +[unused635] +[unused636] +[unused637] +[unused638] +[unused639] +[unused640] +[unused641] 
+[unused642] +[unused643] +[unused644] +[unused645] +[unused646] +[unused647] +[unused648] +[unused649] +[unused650] +[unused651] +[unused652] +[unused653] +[unused654] +[unused655] +[unused656] +[unused657] +[unused658] +[unused659] +[unused660] +[unused661] +[unused662] +[unused663] +[unused664] +[unused665] +[unused666] +[unused667] +[unused668] +[unused669] +[unused670] +[unused671] +[unused672] +[unused673] +[unused674] +[unused675] +[unused676] +[unused677] +[unused678] +[unused679] +[unused680] +[unused681] +[unused682] +[unused683] +[unused684] +[unused685] +[unused686] +[unused687] +[unused688] +[unused689] +[unused690] +[unused691] +[unused692] +[unused693] +[unused694] +[unused695] +[unused696] +[unused697] +[unused698] +[unused699] +[unused700] +[unused701] +[unused702] +[unused703] +[unused704] +[unused705] +[unused706] +[unused707] +[unused708] +[unused709] +[unused710] +[unused711] +[unused712] +[unused713] +[unused714] +[unused715] +[unused716] +[unused717] +[unused718] +[unused719] +[unused720] +[unused721] +[unused722] +[unused723] +[unused724] +[unused725] +[unused726] +[unused727] +[unused728] +[unused729] +[unused730] +[unused731] +[unused732] +[unused733] +[unused734] +[unused735] +[unused736] +[unused737] +[unused738] +[unused739] +[unused740] +[unused741] +[unused742] +[unused743] +[unused744] +[unused745] +[unused746] +[unused747] +[unused748] +[unused749] +[unused750] +[unused751] +[unused752] +[unused753] +[unused754] +[unused755] +[unused756] +[unused757] +[unused758] +[unused759] +[unused760] +[unused761] +[unused762] +[unused763] +[unused764] +[unused765] +[unused766] +[unused767] +[unused768] +[unused769] +[unused770] +[unused771] +[unused772] +[unused773] +[unused774] +[unused775] +[unused776] +[unused777] +[unused778] +[unused779] +[unused780] +[unused781] +[unused782] +[unused783] +[unused784] +[unused785] +[unused786] +[unused787] +[unused788] +[unused789] +[unused790] +[unused791] +[unused792] +[unused793] +[unused794] +[unused795] +[unused796] +[unused797] +[unused798] +[unused799] +[unused800] +[unused801] +[unused802] +[unused803] +[unused804] +[unused805] +[unused806] +[unused807] +[unused808] +[unused809] +[unused810] +[unused811] +[unused812] +[unused813] +[unused814] +[unused815] +[unused816] +[unused817] +[unused818] +[unused819] +[unused820] +[unused821] +[unused822] +[unused823] +[unused824] +[unused825] +[unused826] +[unused827] +[unused828] +[unused829] +[unused830] +[unused831] +[unused832] +[unused833] +[unused834] +[unused835] +[unused836] +[unused837] +[unused838] +[unused839] +[unused840] +[unused841] +[unused842] +[unused843] +[unused844] +[unused845] +[unused846] +[unused847] +[unused848] +[unused849] +[unused850] +[unused851] +[unused852] +[unused853] +[unused854] +[unused855] +[unused856] +[unused857] +[unused858] +[unused859] +[unused860] +[unused861] +[unused862] +[unused863] +[unused864] +[unused865] +[unused866] +[unused867] +[unused868] +[unused869] +[unused870] +[unused871] +[unused872] +[unused873] +[unused874] +[unused875] +[unused876] +[unused877] +[unused878] +[unused879] +[unused880] +[unused881] +[unused882] +[unused883] +[unused884] +[unused885] +[unused886] +[unused887] +[unused888] +[unused889] +[unused890] +[unused891] +[unused892] +[unused893] +[unused894] +[unused895] +[unused896] +[unused897] +[unused898] +[unused899] +[unused900] +[unused901] +[unused902] +[unused903] +[unused904] +[unused905] +[unused906] +[unused907] +[unused908] +[unused909] +[unused910] +[unused911] +[unused912] +[unused913] +[unused914] 
+[unused915] +[unused916] +[unused917] +[unused918] +[unused919] +[unused920] +[unused921] +[unused922] +[unused923] +[unused924] +[unused925] +[unused926] +[unused927] +[unused928] +[unused929] +[unused930] +[unused931] +[unused932] +[unused933] +[unused934] +[unused935] +[unused936] +[unused937] +[unused938] +[unused939] +[unused940] +[unused941] +[unused942] +[unused943] +[unused944] +[unused945] +[unused946] +[unused947] +[unused948] +[unused949] +[unused950] +[unused951] +[unused952] +[unused953] +[unused954] +[unused955] +[unused956] +[unused957] +[unused958] +[unused959] +[unused960] +[unused961] +[unused962] +[unused963] +[unused964] +[unused965] +[unused966] +[unused967] +[unused968] +[unused969] +[unused970] +[unused971] +[unused972] +[unused973] +[unused974] +[unused975] +[unused976] +[unused977] +[unused978] +[unused979] +[unused980] +[unused981] +[unused982] +[unused983] +[unused984] +[unused985] +[unused986] +[unused987] +[unused988] +[unused989] +[unused990] +[unused991] +[unused992] +[unused993] +! +" +# +$ +% +& +' +( +) +* ++ +, +- +. +/ +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +: +; +< += +> +? +@ +[ +\ +] +^ +_ +` +a +b +c +d +e +f +g +h +i +j +k +l +m +n +o +p +q +r +s +t +u +v +w +x +y +z +{ +| +} +~ +¡ +¢ +£ +¤ +¥ +¦ +§ +¨ +© +ª +« +¬ +® +° +± +² +³ +´ +µ +¶ +· +¹ +º +» +¼ +½ +¾ +¿ +× +ß +æ +ð +÷ +ø +þ +đ +ħ +ı +ł +ŋ +œ +ƒ +ɐ +ɑ +ɒ +ɔ +ɕ +ə +ɛ +ɡ +ɣ +ɨ +ɪ +ɫ +ɬ +ɯ +ɲ +ɴ +ɹ +ɾ +ʀ +ʁ +ʂ +ʃ +ʉ +ʊ +ʋ +ʌ +ʎ +ʐ +ʑ +ʒ +ʔ +ʰ +ʲ +ʳ +ʷ +ʸ +ʻ +ʼ +ʾ +ʿ +ˈ +ː +ˡ +ˢ +ˣ +ˤ +α +β +γ +δ +ε +ζ +η +θ +ι +κ +λ +μ +ν +ξ +ο +π +ρ +ς +σ +τ +υ +φ +χ +ψ +ω +а +б +в +г +д +е +ж +з +и +к +л +м +н +о +п +р +с +т +у +ф +х +ц +ч +ш +щ +ъ +ы +ь +э +ю +я +ђ +є +і +ј +љ +њ +ћ +ӏ +ա +բ +գ +դ +ե +թ +ի +լ +կ +հ +մ +յ +ն +ո +պ +ս +վ +տ +ր +ւ +ք +־ +א +ב +ג +ד +ה +ו +ז +ח +ט +י +ך +כ +ל +ם +מ +ן +נ +ס +ע +ף +פ +ץ +צ +ק +ר +ש +ת +، +ء +ا +ب +ة +ت +ث +ج +ح +خ +د +ذ +ر +ز +س +ش +ص +ض +ط +ظ +ع +غ +ـ +ف +ق +ك +ل +م +ن +ه +و +ى +ي +ٹ +پ +چ +ک +گ +ں +ھ +ہ +ی +ے +अ +आ +उ +ए +क +ख +ग +च +ज +ट +ड +ण +त +थ +द +ध +न +प +ब +भ +म +य +र +ल +व +श +ष +स +ह +ा +ि +ी +ो +। +॥ +ং +অ +আ +ই +উ +এ +ও +ক +খ +গ +চ +ছ +জ +ট +ড +ণ +ত +থ +দ +ধ +ন +প +ব +ভ +ম +য +র +ল +শ +ষ +স +হ +া +ি +ী +ে +க +ச +ட +த +ந +ன +ப +ம +ய +ர +ல +ள +வ +ா +ி +ு +ே +ை +ನ +ರ +ಾ +ක +ය +ර +ල +ව +ා +ก +ง +ต +ท +น +พ +ม +ย +ร +ล +ว +ส +อ +า +เ +་ +། +ག +ང +ད +ན +པ +བ +མ +འ +ར +ལ +ས +မ +ა +ბ +გ +დ +ე +ვ +თ +ი +კ +ლ +მ +ნ +ო +რ +ს +ტ +უ +ᄀ +ᄂ +ᄃ +ᄅ +ᄆ +ᄇ +ᄉ +ᄊ +ᄋ +ᄌ +ᄎ +ᄏ +ᄐ +ᄑ +ᄒ +ᅡ +ᅢ +ᅥ +ᅦ +ᅧ +ᅩ +ᅪ +ᅭ +ᅮ +ᅯ +ᅲ +ᅳ +ᅴ +ᅵ +ᆨ +ᆫ +ᆯ +ᆷ +ᆸ +ᆼ +ᴬ +ᴮ +ᴰ +ᴵ +ᴺ +ᵀ +ᵃ +ᵇ +ᵈ +ᵉ +ᵍ +ᵏ +ᵐ +ᵒ +ᵖ +ᵗ +ᵘ +ᵢ +ᵣ +ᵤ +ᵥ +ᶜ +ᶠ +‐ +‑ +‒ +– +— +― +‖ +‘ +’ +‚ +“ +” +„ +† +‡ +• +… +‰ +′ +″ +› +‿ +⁄ +⁰ +ⁱ +⁴ +⁵ +⁶ +⁷ +⁸ +⁹ +⁺ +⁻ +ⁿ +₀ +₁ +₂ +₃ +₄ +₅ +₆ +₇ +₈ +₉ +₊ +₍ +₎ +ₐ +ₑ +ₒ +ₓ +ₕ +ₖ +ₗ +ₘ +ₙ +ₚ +ₛ +ₜ +₤ +₩ +€ +₱ +₹ +ℓ +№ +ℝ +™ +⅓ +⅔ +← +↑ +→ +↓ +↔ +↦ +⇄ +⇌ +⇒ +∂ +∅ +∆ +∇ +∈ +− +∗ +∘ +√ +∞ +∧ +∨ +∩ +∪ +≈ +≡ +≤ +≥ +⊂ +⊆ +⊕ +⊗ +⋅ +─ +│ +■ +▪ +● +★ +☆ +☉ +♠ +♣ +♥ +♦ +♭ +♯ +⟨ +⟩ +ⱼ +⺩ +⺼ +⽥ +、 +。 +〈 +〉 +《 +》 +「 +」 +『 +』 +〜 +あ +い +う +え +お +か +き +く +け +こ +さ +し +す +せ +そ +た +ち +っ +つ +て +と +な +に +ぬ +ね +の +は +ひ +ふ +へ +ほ +ま +み +む +め +も +や +ゆ +よ +ら +り +る +れ +ろ +を +ん +ァ +ア +ィ +イ +ウ +ェ +エ +オ +カ +キ +ク +ケ +コ +サ +シ +ス +セ +タ +チ +ッ +ツ +テ +ト +ナ +ニ +ノ +ハ +ヒ +フ +ヘ +ホ +マ +ミ +ム +メ +モ +ャ +ュ +ョ +ラ +リ +ル +レ +ロ +ワ +ン +・ +ー +一 +三 +上 +下 +不 +世 +中 +主 +久 +之 +也 +事 +二 +五 +井 +京 +人 +亻 +仁 +介 +代 +仮 +伊 +会 +佐 +侍 +保 +信 +健 +元 +光 +八 +公 +内 +出 +分 +前 +劉 +力 +加 +勝 +北 +区 +十 +千 +南 +博 +原 +口 +古 +史 +司 +合 +吉 +同 +名 +和 +囗 +四 +国 +國 +土 +地 +坂 +城 +堂 +場 +士 +夏 +外 +大 +天 +太 +夫 +奈 +女 +子 +学 +宀 +宇 +安 +宗 +定 +宣 +宮 +家 +宿 +寺 +將 +小 +尚 +山 +岡 +島 +崎 +川 +州 +巿 +帝 +平 +年 
+幸 +广 +弘 +張 +彳 +後 +御 +德 +心 +忄 +志 +忠 +愛 +成 +我 +戦 +戸 +手 +扌 +政 +文 +新 +方 +日 +明 +星 +春 +昭 +智 +曲 +書 +月 +有 +朝 +木 +本 +李 +村 +東 +松 +林 +森 +楊 +樹 +橋 +歌 +止 +正 +武 +比 +氏 +民 +水 +氵 +氷 +永 +江 +沢 +河 +治 +法 +海 +清 +漢 +瀬 +火 +版 +犬 +王 +生 +田 +男 +疒 +発 +白 +的 +皇 +目 +相 +省 +真 +石 +示 +社 +神 +福 +禾 +秀 +秋 +空 +立 +章 +竹 +糹 +美 +義 +耳 +良 +艹 +花 +英 +華 +葉 +藤 +行 +街 +西 +見 +訁 +語 +谷 +貝 +貴 +車 +軍 +辶 +道 +郎 +郡 +部 +都 +里 +野 +金 +鈴 +镇 +長 +門 +間 +阝 +阿 +陳 +陽 +雄 +青 +面 +風 +食 +香 +馬 +高 +龍 +龸 +fi +fl +! +( +) +, +- +. +/ +: +? +~ +the +of +and +in +to +was +he +is +as +for +on +with +that +it +his +by +at +from +her +##s +she +you +had +an +were +but +be +this +are +not +my +they +one +which +or +have +him +me +first +all +also +their +has +up +who +out +been +when +after +there +into +new +two +its +##a +time +would +no +what +about +said +we +over +then +other +so +more +##e +can +if +like +back +them +only +some +could +##i +where +just +##ing +during +before +##n +do +##o +made +school +through +than +now +years +most +world +may +between +down +well +three +##d +year +while +will +##ed +##r +##y +later +##t +city +under +around +did +such +being +used +state +people +part +know +against +your +many +second +university +both +national +##er +these +don +known +off +way +until +re +how +even +get +head +... +didn +##ly +team +american +because +de +##l +born +united +film +since +still +long +work +south +us +became +any +high +again +day +family +see +right +man +eyes +house +season +war +states +including +took +life +north +same +each +called +name +much +place +however +go +four +group +another +found +won +area +here +going +10 +away +series +left +home +music +best +make +hand +number +company +several +never +last +john +000 +very +album +take +end +good +too +following +released +game +played +little +began +district +##m +old +want +those +side +held +own +early +county +ll +league +use +west +##u +face +think +##es +2010 +government +##h +march +came +small +general +town +june +##on +line +based +something +##k +september +thought +looked +along +international +2011 +air +july +club +went +january +october +our +august +april +york +12 +few +2012 +2008 +east +show +member +college +2009 +father +public +##us +come +men +five +set +station +church +##c +next +former +november +room +party +located +december +2013 +age +got +2007 +##g +system +let +love +2006 +though +every +2014 +look +song +water +century +without +body +black +night +within +great +women +single +ve +building +large +population +river +named +band +white +started +##an +once +15 +20 +should +18 +2015 +service +top +built +british +open +death +king +moved +local +times +children +february +book +why +11 +door +need +president +order +final +road +wasn +although +due +major +died +village +third +knew +2016 +asked +turned +st +wanted +say +##p +together +received +main +son +served +different +##en +behind +himself +felt +members +power +football +law +voice +play +##in +near +park +history +30 +having +2005 +16 +##man +saw +mother +##al +army +point +front +help +english +street +art +late +hands +games +award +##ia +young +14 +put +published +country +division +across +told +13 +often +ever +french +london +center +six +red +2017 +led +days +include +light +25 +find +tell +among +species +really +according +central +half +2004 +form +original +gave +office +making +enough +lost +full +opened +must +included +live +given +german +player +run +business +woman +community +cup +might +million +land +2000 +court +development +17 +short +round +ii +km +seen +class +story +always 
+become +sure +research +almost +director +council +la +##2 +career +things +using +island +##z +couldn +car +##is +24 +close +force +##1 +better +free +support +control +field +students +2003 +education +married +##b +nothing +worked +others +record +big +inside +level +anything +continued +give +james +##3 +military +established +non +returned +feel +does +title +written +thing +feet +william +far +co +association +hard +already +2002 +##ra +championship +human +western +100 +##na +department +hall +role +various +production +21 +19 +heart +2001 +living +fire +version +##ers +##f +television +royal +##4 +produced +working +act +case +society +region +present +radio +period +looking +least +total +keep +england +wife +program +per +brother +mind +special +22 +##le +am +works +soon +##6 +political +george +services +taken +created +##7 +further +able +reached +david +union +joined +upon +done +important +social +information +either +##ic +##x +appeared +position +ground +lead +rock +dark +election +23 +board +france +hair +course +arms +site +police +girl +instead +real +sound +##v +words +moment +##te +someone +##8 +summer +project +announced +san +less +wrote +past +followed +##5 +blue +founded +al +finally +india +taking +records +america +##ne +1999 +design +considered +northern +god +stop +battle +toward +european +outside +described +track +today +playing +language +28 +call +26 +heard +professional +low +australia +miles +california +win +yet +green +##ie +trying +blood +##ton +southern +science +maybe +everything +match +square +27 +mouth +video +race +recorded +leave +above +##9 +daughter +points +space +1998 +museum +change +middle +common +##0 +move +tv +post +##ta +lake +seven +tried +elected +closed +ten +paul +minister +##th +months +start +chief +return +canada +person +sea +release +similar +modern +brought +rest +hit +formed +mr +##la +1997 +floor +event +doing +thomas +1996 +robert +care +killed +training +star +week +needed +turn +finished +railway +rather +news +health +sent +example +ran +term +michael +coming +currently +yes +forces +despite +gold +areas +50 +stage +fact +29 +dead +says +popular +2018 +originally +germany +probably +developed +result +pulled +friend +stood +money +running +mi +signed +word +songs +child +eventually +met +tour +average +teams +minutes +festival +current +deep +kind +1995 +decided +usually +eastern +seemed +##ness +episode +bed +added +table +indian +private +charles +route +available +idea +throughout +centre +addition +appointed +style +1994 +books +eight +construction +press +mean +wall +friends +remained +schools +study +##ch +##um +institute +oh +chinese +sometimes +events +possible +1992 +australian +type +brown +forward +talk +process +food +debut +seat +performance +committee +features +character +arts +herself +else +lot +strong +russian +range +hours +peter +arm +##da +morning +dr +sold +##ry +quickly +directed +1993 +guitar +china +##w +31 +list +##ma +performed +media +uk +players +smile +##rs +myself +40 +placed +coach +province +towards +wouldn +leading +whole +boy +official +designed +grand +census +##el +europe +attack +japanese +henry +1991 +##re +##os +cross +getting +alone +action +lower +network +wide +washington +japan +1990 +hospital +believe +changed +sister +##ar +hold +gone +sir +hadn +ship +##ka +studies +academy +shot +rights +below +base +bad +involved +kept +largest +##ist +bank +future +especially +beginning +mark +movement +section +female +magazine +plan +professor +lord +longer +##ian +sat +walked +hill 
+actually +civil +energy +model +families +size +thus +aircraft +completed +includes +data +captain +##or +fight +vocals +featured +richard +bridge +fourth +1989 +officer +stone +hear +##ism +means +medical +groups +management +self +lips +competition +entire +lived +technology +leaving +federal +tournament +bit +passed +hot +independent +awards +kingdom +mary +spent +fine +doesn +reported +##ling +jack +fall +raised +itself +stay +true +studio +1988 +sports +replaced +paris +systems +saint +leader +theatre +whose +market +capital +parents +spanish +canadian +earth +##ity +cut +degree +writing +bay +christian +awarded +natural +higher +bill +##as +coast +provided +previous +senior +ft +valley +organization +stopped +onto +countries +parts +conference +queen +security +interest +saying +allowed +master +earlier +phone +matter +smith +winning +try +happened +moving +campaign +los +##ley +breath +nearly +mid +1987 +certain +girls +date +italian +african +standing +fell +artist +##ted +shows +deal +mine +industry +1986 +##ng +everyone +republic +provide +collection +library +student +##ville +primary +owned +older +via +heavy +1st +makes +##able +attention +anyone +africa +##ri +stated +length +ended +fingers +command +staff +skin +foreign +opening +governor +okay +medal +kill +sun +cover +job +1985 +introduced +chest +hell +feeling +##ies +success +meet +reason +standard +meeting +novel +1984 +trade +source +buildings +##land +rose +guy +goal +##ur +chapter +native +husband +previously +unit +limited +entered +weeks +producer +operations +mountain +takes +covered +forced +related +roman +complete +successful +key +texas +cold +##ya +channel +1980 +traditional +films +dance +clear +approximately +500 +nine +van +prince +question +active +tracks +ireland +regional +silver +author +personal +sense +operation +##ine +economic +1983 +holding +twenty +isbn +additional +speed +hour +edition +regular +historic +places +whom +shook +movie +km² +secretary +prior +report +chicago +read +foundation +view +engine +scored +1982 +units +ask +airport +property +ready +immediately +lady +month +listed +contract +##de +manager +themselves +lines +##ki +navy +writer +meant +##ts +runs +##ro +practice +championships +singer +glass +commission +required +forest +starting +culture +generally +giving +access +attended +test +couple +stand +catholic +martin +caught +executive +##less +eye +##ey +thinking +chair +quite +shoulder +1979 +hope +decision +plays +defeated +municipality +whether +structure +offered +slowly +pain +ice +direction +##ion +paper +mission +1981 +mostly +200 +noted +individual +managed +nature +lives +plant +##ha +helped +except +studied +computer +figure +relationship +issue +significant +loss +die +smiled +gun +ago +highest +1972 +##am +male +bring +goals +mexico +problem +distance +commercial +completely +location +annual +famous +drive +1976 +neck +1978 +surface +caused +italy +understand +greek +highway +wrong +hotel +comes +appearance +joseph +double +issues +musical +companies +castle +income +review +assembly +bass +initially +parliament +artists +experience +1974 +particular +walk +foot +engineering +talking +window +dropped +##ter +miss +baby +boys +break +1975 +stars +edge +remember +policy +carried +train +stadium +bar +sex +angeles +evidence +##ge +becoming +assistant +soviet +1977 +upper +step +wing +1970 +youth +financial +reach +##ll +actor +numerous +##se +##st +nodded +arrived +##ation +minute +##nt +believed +sorry +complex +beautiful +victory +associated +temple +1968 +1973 
+chance +perhaps +metal +##son +1945 +bishop +##et +lee +launched +particularly +tree +le +retired +subject +prize +contains +yeah +theory +empire +##ce +suddenly +waiting +trust +recording +##to +happy +terms +camp +champion +1971 +religious +pass +zealand +names +2nd +port +ancient +tom +corner +represented +watch +legal +anti +justice +cause +watched +brothers +45 +material +changes +simply +response +louis +fast +##ting +answer +60 +historical +1969 +stories +straight +create +feature +increased +rate +administration +virginia +el +activities +cultural +overall +winner +programs +basketball +legs +guard +beyond +cast +doctor +mm +flight +results +remains +cost +effect +winter +##ble +larger +islands +problems +chairman +grew +commander +isn +1967 +pay +failed +selected +hurt +fort +box +regiment +majority +journal +35 +edward +plans +##ke +##ni +shown +pretty +irish +characters +directly +scene +likely +operated +allow +spring +##j +junior +matches +looks +mike +houses +fellow +##tion +beach +marriage +##ham +##ive +rules +oil +65 +florida +expected +nearby +congress +sam +peace +recent +iii +wait +subsequently +cell +##do +variety +serving +agreed +please +poor +joe +pacific +attempt +wood +democratic +piece +prime +##ca +rural +mile +touch +appears +township +1964 +1966 +soldiers +##men +##ized +1965 +pennsylvania +closer +fighting +claimed +score +jones +physical +editor +##ous +filled +genus +specific +sitting +super +mom +##va +therefore +supported +status +fear +cases +store +meaning +wales +minor +spain +tower +focus +vice +frank +follow +parish +separate +golden +horse +fifth +remaining +branch +32 +presented +stared +##id +uses +secret +forms +##co +baseball +exactly +##ck +choice +note +discovered +travel +composed +truth +russia +ball +color +kiss +dad +wind +continue +ring +referred +numbers +digital +greater +##ns +metres +slightly +direct +increase +1960 +responsible +crew +rule +trees +troops +##no +broke +goes +individuals +hundred +weight +creek +sleep +memory +defense +provides +ordered +code +value +jewish +windows +1944 +safe +judge +whatever +corps +realized +growing +pre +##ga +cities +alexander +gaze +lies +spread +scott +letter +showed +situation +mayor +transport +watching +workers +extended +##li +expression +normal +##ment +chart +multiple +border +##ba +host +##ner +daily +mrs +walls +piano +##ko +heat +cannot +##ate +earned +products +drama +era +authority +seasons +join +grade +##io +sign +difficult +machine +1963 +territory +mainly +##wood +stations +squadron +1962 +stepped +iron +19th +##led +serve +appear +sky +speak +broken +charge +knowledge +kilometres +removed +ships +article +campus +simple +##ty +pushed +britain +##ve +leaves +recently +cd +soft +boston +latter +easy +acquired +poland +##sa +quality +officers +presence +planned +nations +mass +broadcast +jean +share +image +influence +wild +offer +emperor +electric +reading +headed +ability +promoted +yellow +ministry +1942 +throat +smaller +politician +##by +latin +spoke +cars +williams +males +lack +pop +80 +##ier +acting +seeing +consists +##ti +estate +1961 +pressure +johnson +newspaper +jr +chris +olympics +online +conditions +beat +elements +walking +vote +##field +needs +carolina +text +featuring +global +block +shirt +levels +francisco +purpose +females +et +dutch +duke +ahead +gas +twice +safety +serious +turning +highly +lieutenant +firm +maria +amount +mixed +daniel +proposed +perfect +agreement +affairs +3rd +seconds +contemporary +paid +1943 +prison +save +kitchen +label 
+administrative +intended +constructed +academic +nice +teacher +races +1956 +formerly +corporation +ben +nation +issued +shut +1958 +drums +housing +victoria +seems +opera +1959 +graduated +function +von +mentioned +picked +build +recognized +shortly +protection +picture +notable +exchange +elections +1980s +loved +percent +racing +fish +elizabeth +garden +volume +hockey +1941 +beside +settled +##ford +1940 +competed +replied +drew +1948 +actress +marine +scotland +steel +glanced +farm +steve +1957 +risk +tonight +positive +magic +singles +effects +gray +screen +dog +##ja +residents +bus +sides +none +secondary +literature +polish +destroyed +flying +founder +households +1939 +lay +reserve +usa +gallery +##ler +1946 +industrial +younger +approach +appearances +urban +ones +1950 +finish +avenue +powerful +fully +growth +page +honor +jersey +projects +advanced +revealed +basic +90 +infantry +pair +equipment +visit +33 +evening +search +grant +effort +solo +treatment +buried +republican +primarily +bottom +owner +1970s +israel +gives +jim +dream +bob +remain +spot +70 +notes +produce +champions +contact +ed +soul +accepted +ways +del +##ally +losing +split +price +capacity +basis +trial +questions +##ina +1955 +20th +guess +officially +memorial +naval +initial +##ization +whispered +median +engineer +##ful +sydney +##go +columbia +strength +300 +1952 +tears +senate +00 +card +asian +agent +1947 +software +44 +draw +warm +supposed +com +pro +##il +transferred +leaned +##at +candidate +escape +mountains +asia +potential +activity +entertainment +seem +traffic +jackson +murder +36 +slow +product +orchestra +haven +agency +bbc +taught +website +comedy +unable +storm +planning +albums +rugby +environment +scientific +grabbed +protect +##hi +boat +typically +1954 +1953 +damage +principal +divided +dedicated +mount +ohio +##berg +pick +fought +driver +##der +empty +shoulders +sort +thank +berlin +prominent +account +freedom +necessary +efforts +alex +headquarters +follows +alongside +des +simon +andrew +suggested +operating +learning +steps +1949 +sweet +technical +begin +easily +34 +teeth +speaking +settlement +scale +##sh +renamed +ray +max +enemy +semi +joint +compared +##rd +scottish +leadership +analysis +offers +georgia +pieces +captured +animal +deputy +guest +organized +##lin +tony +combined +method +challenge +1960s +huge +wants +battalion +sons +rise +crime +types +facilities +telling +path +1951 +platform +sit +1990s +##lo +tells +assigned +rich +pull +##ot +commonly +alive +##za +letters +concept +conducted +wearing +happen +bought +becomes +holy +gets +ocean +defeat +languages +purchased +coffee +occurred +titled +##q +declared +applied +sciences +concert +sounds +jazz +brain +##me +painting +fleet +tax +nick +##ius +michigan +count +animals +leaders +episodes +##line +content +##den +birth +##it +clubs +64 +palace +critical +refused +fair +leg +laughed +returning +surrounding +participated +formation +lifted +pointed +connected +rome +medicine +laid +taylor +santa +powers +adam +tall +shared +focused +knowing +yards +entrance +falls +##wa +calling +##ad +sources +chosen +beneath +resources +yard +##ite +nominated +silence +zone +defined +##que +gained +thirty +38 +bodies +moon +##ard +adopted +christmas +widely +register +apart +iran +premier +serves +du +unknown +parties +##les +generation +##ff +continues +quick +fields +brigade +quiet +teaching +clothes +impact +weapons +partner +flat +theater +supreme +1938 +37 +relations +##tor +plants +suffered +1936 +wilson +kids +begins +##age 
+1918 +seats +armed +internet +models +worth +laws +400 +communities +classes +background +knows +thanks +quarter +reaching +humans +carry +killing +format +kong +hong +setting +75 +architecture +disease +railroad +inc +possibly +wish +arthur +thoughts +harry +doors +density +##di +crowd +illinois +stomach +tone +unique +reports +anyway +##ir +liberal +der +vehicle +thick +dry +drug +faced +largely +facility +theme +holds +creation +strange +colonel +##mi +revolution +bell +politics +turns +silent +rail +relief +independence +combat +shape +write +determined +sales +learned +4th +finger +oxford +providing +1937 +heritage +fiction +situated +designated +allowing +distribution +hosted +##est +sight +interview +estimated +reduced +##ria +toronto +footballer +keeping +guys +damn +claim +motion +sport +sixth +stayed +##ze +en +rear +receive +handed +twelve +dress +audience +granted +brazil +##well +spirit +##ated +noticed +etc +olympic +representative +eric +tight +trouble +reviews +drink +vampire +missing +roles +ranked +newly +household +finals +wave +critics +##ee +phase +massachusetts +pilot +unlike +philadelphia +bright +guns +crown +organizations +roof +42 +respectively +clearly +tongue +marked +circle +fox +korea +bronze +brian +expanded +sexual +supply +yourself +inspired +labour +fc +##ah +reference +vision +draft +connection +brand +reasons +1935 +classic +driving +trip +jesus +cells +entry +1920 +neither +trail +claims +atlantic +orders +labor +nose +afraid +identified +intelligence +calls +cancer +attacked +passing +stephen +positions +imperial +grey +jason +39 +sunday +48 +swedish +avoid +extra +uncle +message +covers +allows +surprise +materials +fame +hunter +##ji +1930 +citizens +figures +davis +environmental +confirmed +shit +titles +di +performing +difference +acts +attacks +##ov +existing +votes +opportunity +nor +shop +entirely +trains +opposite +pakistan +##pa +develop +resulted +representatives +actions +reality +pressed +##ish +barely +wine +conversation +faculty +northwest +ends +documentary +nuclear +stock +grace +sets +eat +alternative +##ps +bag +resulting +creating +surprised +cemetery +1919 +drop +finding +sarah +cricket +streets +tradition +ride +1933 +exhibition +target +ear +explained +rain +composer +injury +apartment +municipal +educational +occupied +netherlands +clean +billion +constitution +learn +1914 +maximum +classical +francis +lose +opposition +jose +ontario +bear +core +hills +rolled +ending +drawn +permanent +fun +##tes +##lla +lewis +sites +chamber +ryan +##way +scoring +height +1934 +##house +lyrics +staring +55 +officials +1917 +snow +oldest +##tic +orange +##ger +qualified +interior +apparently +succeeded +thousand +dinner +lights +existence +fans +heavily +41 +greatest +conservative +send +bowl +plus +enter +catch +##un +economy +duty +1929 +speech +authorities +princess +performances +versions +shall +graduate +pictures +effective +remembered +poetry +desk +crossed +starring +starts +passenger +sharp +##ant +acres +ass +weather +falling +rank +fund +supporting +check +adult +publishing +heads +cm +southeast +lane +##burg +application +bc +##ura +les +condition +transfer +prevent +display +ex +regions +earl +federation +cool +relatively +answered +besides +1928 +obtained +portion +##town +mix +##ding +reaction +liked +dean +express +peak +1932 +##tte +counter +religion +chain +rare +miller +convention +aid +lie +vehicles +mobile +perform +squad +wonder +lying +crazy +sword +##ping +attempted +centuries +weren +philosophy +category +##ize +anna 
+interested +47 +sweden +wolf +frequently +abandoned +kg +literary +alliance +task +entitled +##ay +threw +promotion +factory +tiny +soccer +visited +matt +fm +achieved +52 +defence +internal +persian +43 +methods +##ging +arrested +otherwise +cambridge +programming +villages +elementary +districts +rooms +criminal +conflict +worry +trained +1931 +attempts +waited +signal +bird +truck +subsequent +programme +##ol +ad +49 +communist +details +faith +sector +patrick +carrying +laugh +##ss +controlled +korean +showing +origin +fuel +evil +1927 +##ent +brief +identity +darkness +address +pool +missed +publication +web +planet +ian +anne +wings +invited +##tt +briefly +standards +kissed +##be +ideas +climate +causing +walter +worse +albert +articles +winners +desire +aged +northeast +dangerous +gate +doubt +1922 +wooden +multi +##ky +poet +rising +funding +46 +communications +communication +violence +copies +prepared +ford +investigation +skills +1924 +pulling +electronic +##ak +##ial +##han +containing +ultimately +offices +singing +understanding +restaurant +tomorrow +fashion +christ +ward +da +pope +stands +5th +flow +studios +aired +commissioned +contained +exist +fresh +americans +##per +wrestling +approved +kid +employed +respect +suit +1925 +angel +asking +increasing +frame +angry +selling +1950s +thin +finds +##nd +temperature +statement +ali +explain +inhabitants +towns +extensive +narrow +51 +jane +flowers +images +promise +somewhere +object +fly +closely +##ls +1912 +bureau +cape +1926 +weekly +presidential +legislative +1921 +##ai +##au +launch +founding +##ny +978 +##ring +artillery +strike +un +institutions +roll +writers +landing +chose +kevin +anymore +pp +##ut +attorney +fit +dan +billboard +receiving +agricultural +breaking +sought +dave +admitted +lands +mexican +##bury +charlie +specifically +hole +iv +howard +credit +moscow +roads +accident +1923 +proved +wear +struck +hey +guards +stuff +slid +expansion +1915 +cat +anthony +##kin +melbourne +opposed +sub +southwest +architect +failure +plane +1916 +##ron +map +camera +tank +listen +regarding +wet +introduction +metropolitan +link +ep +fighter +inch +grown +gene +anger +fixed +buy +dvd +khan +domestic +worldwide +chapel +mill +functions +examples +##head +developing +1910 +turkey +hits +pocket +antonio +papers +grow +unless +circuit +18th +concerned +attached +journalist +selection +journey +converted +provincial +painted +hearing +aren +bands +negative +aside +wondered +knight +lap +survey +ma +##ow +noise +billy +##ium +shooting +guide +bedroom +priest +resistance +motor +homes +sounded +giant +##mer +150 +scenes +equal +comic +patients +hidden +solid +actual +bringing +afternoon +touched +funds +wedding +consisted +marie +canal +sr +kim +treaty +turkish +recognition +residence +cathedral +broad +knees +incident +shaped +fired +norwegian +handle +cheek +contest +represent +##pe +representing +beauty +##sen +birds +advantage +emergency +wrapped +drawing +notice +pink +broadcasting +##ong +somehow +bachelor +seventh +collected +registered +establishment +alan +assumed +chemical +personnel +roger +retirement +jeff +portuguese +wore +tied +device +threat +progress +advance +##ised +banks +hired +manchester +nfl +teachers +structures +forever +##bo +tennis +helping +saturday +sale +applications +junction +hip +incorporated +neighborhood +dressed +ceremony +##ds +influenced +hers +visual +stairs +decades +inner +kansas +hung +hoped +gain +scheduled +downtown +engaged +austria +clock +norway +certainly +pale +protected +1913 
+victor +employees +plate +putting +surrounded +##ists +finishing +blues +tropical +##ries +minnesota +consider +philippines +accept +54 +retrieved +1900 +concern +anderson +properties +institution +gordon +successfully +vietnam +##dy +backing +outstanding +muslim +crossing +folk +producing +usual +demand +occurs +observed +lawyer +educated +##ana +kelly +string +pleasure +budget +items +quietly +colorado +philip +typical +##worth +derived +600 +survived +asks +mental +##ide +56 +jake +jews +distinguished +ltd +1911 +sri +extremely +53 +athletic +loud +thousands +worried +shadow +transportation +horses +weapon +arena +importance +users +tim +objects +contributed +dragon +douglas +aware +senator +johnny +jordan +sisters +engines +flag +investment +samuel +shock +capable +clark +row +wheel +refers +session +familiar +biggest +wins +hate +maintained +drove +hamilton +request +expressed +injured +underground +churches +walker +wars +tunnel +passes +stupid +agriculture +softly +cabinet +regarded +joining +indiana +##ea +##ms +push +dates +spend +behavior +woods +protein +gently +chase +morgan +mention +burning +wake +combination +occur +mirror +leads +jimmy +indeed +impossible +singapore +paintings +covering +##nes +soldier +locations +attendance +sell +historian +wisconsin +invasion +argued +painter +diego +changing +egypt +##don +experienced +inches +##ku +missouri +vol +grounds +spoken +switzerland +##gan +reform +rolling +ha +forget +massive +resigned +burned +allen +tennessee +locked +values +improved +##mo +wounded +universe +sick +dating +facing +pack +purchase +user +##pur +moments +##ul +merged +anniversary +1908 +coal +brick +understood +causes +dynasty +queensland +establish +stores +crisis +promote +hoping +views +cards +referee +extension +##si +raise +arizona +improve +colonial +formal +charged +##rt +palm +lucky +hide +rescue +faces +95 +feelings +candidates +juan +##ell +goods +6th +courses +weekend +59 +luke +cash +fallen +##om +delivered +affected +installed +carefully +tries +swiss +hollywood +costs +lincoln +responsibility +##he +shore +file +proper +normally +maryland +assistance +jump +constant +offering +friendly +waters +persons +realize +contain +trophy +800 +partnership +factor +58 +musicians +cry +bound +oregon +indicated +hero +houston +medium +##ure +consisting +somewhat +##ara +57 +cycle +##che +beer +moore +frederick +gotten +eleven +worst +weak +approached +arranged +chin +loan +universal +bond +fifteen +pattern +disappeared +##ney +translated +##zed +lip +arab +capture +interests +insurance +##chi +shifted +cave +prix +warning +sections +courts +coat +plot +smell +feed +golf +favorite +maintain +knife +vs +voted +degrees +finance +quebec +opinion +translation +manner +ruled +operate +productions +choose +musician +discovery +confused +tired +separated +stream +techniques +committed +attend +ranking +kings +throw +passengers +measure +horror +fan +mining +sand +danger +salt +calm +decade +dam +require +runner +##ik +rush +associate +greece +##ker +rivers +consecutive +matthew +##ski +sighed +sq +documents +steam +edited +closing +tie +accused +1905 +##ini +islamic +distributed +directors +organisation +bruce +7th +breathing +mad +lit +arrival +concrete +taste +08 +composition +shaking +faster +amateur +adjacent +stating +1906 +twin +flew +##ran +tokyo +publications +##tone +obviously +ridge +storage +1907 +carl +pages +concluded +desert +driven +universities +ages +terminal +sequence +borough +250 +constituency +creative +cousin +economics +dreams +margaret 
+notably +reduce +montreal +mode +17th +ears +saved +jan +vocal +##ica +1909 +andy +##jo +riding +roughly +threatened +##ise +meters +meanwhile +landed +compete +repeated +grass +czech +regularly +charges +tea +sudden +appeal +##ung +solution +describes +pierre +classification +glad +parking +##ning +belt +physics +99 +rachel +add +hungarian +participate +expedition +damaged +gift +childhood +85 +fifty +##red +mathematics +jumped +letting +defensive +mph +##ux +##gh +testing +##hip +hundreds +shoot +owners +matters +smoke +israeli +kentucky +dancing +mounted +grandfather +emma +designs +profit +argentina +##gs +truly +li +lawrence +cole +begun +detroit +willing +branches +smiling +decide +miami +enjoyed +recordings +##dale +poverty +ethnic +gay +##bi +gary +arabic +09 +accompanied +##one +##ons +fishing +determine +residential +acid +##ary +alice +returns +starred +mail +##ang +jonathan +strategy +##ue +net +forty +cook +businesses +equivalent +commonwealth +distinct +ill +##cy +seriously +##ors +##ped +shift +harris +replace +rio +imagine +formula +ensure +##ber +additionally +scheme +conservation +occasionally +purposes +feels +favor +##and +##ore +1930s +contrast +hanging +hunt +movies +1904 +instruments +victims +danish +christopher +busy +demon +sugar +earliest +colony +studying +balance +duties +##ks +belgium +slipped +carter +05 +visible +stages +iraq +fifa +##im +commune +forming +zero +07 +continuing +talked +counties +legend +bathroom +option +tail +clay +daughters +afterwards +severe +jaw +visitors +##ded +devices +aviation +russell +kate +##vi +entering +subjects +##ino +temporary +swimming +forth +smooth +ghost +audio +bush +operates +rocks +movements +signs +eddie +##tz +ann +voices +honorary +06 +memories +dallas +pure +measures +racial +promised +66 +harvard +ceo +16th +parliamentary +indicate +benefit +flesh +dublin +louisiana +1902 +1901 +patient +sleeping +1903 +membership +coastal +medieval +wanting +element +scholars +rice +62 +limit +survive +makeup +rating +definitely +collaboration +obvious +##tan +boss +ms +baron +birthday +linked +soil +diocese +##lan +ncaa +##mann +offensive +shell +shouldn +waist +##tus +plain +ross +organ +resolution +manufacturing +adding +relative +kennedy +98 +whilst +moth +marketing +gardens +crash +72 +heading +partners +credited +carlos +moves +cable +##zi +marshall +##out +depending +bottle +represents +rejected +responded +existed +04 +jobs +denmark +lock +##ating +treated +graham +routes +talent +commissioner +drugs +secure +tests +reign +restored +photography +##gi +contributions +oklahoma +designer +disc +grin +seattle +robin +paused +atlanta +unusual +##gate +praised +las +laughing +satellite +hungary +visiting +##sky +interesting +factors +deck +poems +norman +##water +stuck +speaker +rifle +domain +premiered +##her +dc +comics +actors +01 +reputation +eliminated +8th +ceiling +prisoners +script +##nce +leather +austin +mississippi +rapidly +admiral +parallel +charlotte +guilty +tools +gender +divisions +fruit +##bs +laboratory +nelson +fantasy +marry +rapid +aunt +tribe +requirements +aspects +suicide +amongst +adams +bone +ukraine +abc +kick +sees +edinburgh +clothing +column +rough +gods +hunting +broadway +gathered +concerns +##ek +spending +ty +12th +snapped +requires +solar +bones +cavalry +##tta +iowa +drinking +waste +index +franklin +charity +thompson +stewart +tip +flash +landscape +friday +enjoy +singh +poem +listening +##back +eighth +fred +differences +adapted +bomb +ukrainian +surgery +corporate +masters +anywhere 
+##more +waves +odd +sean +portugal +orleans +dick +debate +kent +eating +puerto +cleared +96 +expect +cinema +97 +guitarist +blocks +electrical +agree +involving +depth +dying +panel +struggle +##ged +peninsula +adults +novels +emerged +vienna +metro +debuted +shoes +tamil +songwriter +meets +prove +beating +instance +heaven +scared +sending +marks +artistic +passage +superior +03 +significantly +shopping +##tive +retained +##izing +malaysia +technique +cheeks +##ola +warren +maintenance +destroy +extreme +allied +120 +appearing +##yn +fill +advice +alabama +qualifying +policies +cleveland +hat +battery +smart +authors +10th +soundtrack +acted +dated +lb +glance +equipped +coalition +funny +outer +ambassador +roy +possibility +couples +campbell +dna +loose +ethan +supplies +1898 +gonna +88 +monster +##res +shake +agents +frequency +springs +dogs +practices +61 +gang +plastic +easier +suggests +gulf +blade +exposed +colors +industries +markets +pan +nervous +electoral +charts +legislation +ownership +##idae +mac +appointment +shield +copy +assault +socialist +abbey +monument +license +throne +employment +jay +93 +replacement +charter +cloud +powered +suffering +accounts +oak +connecticut +strongly +wright +colour +crystal +13th +context +welsh +networks +voiced +gabriel +jerry +##cing +forehead +mp +##ens +manage +schedule +totally +remix +##ii +forests +occupation +print +nicholas +brazilian +strategic +vampires +engineers +76 +roots +seek +correct +instrumental +und +alfred +backed +hop +##des +stanley +robinson +traveled +wayne +welcome +austrian +achieve +67 +exit +rates +1899 +strip +whereas +##cs +sing +deeply +adventure +bobby +rick +jamie +careful +components +cap +useful +personality +knee +##shi +pushing +hosts +02 +protest +ca +ottoman +symphony +##sis +63 +boundary +1890 +processes +considering +considerable +tons +##work +##ft +##nia +cooper +trading +dear +conduct +91 +illegal +apple +revolutionary +holiday +definition +harder +##van +jacob +circumstances +destruction +##lle +popularity +grip +classified +liverpool +donald +baltimore +flows +seeking +honour +approval +92 +mechanical +till +happening +statue +critic +increasingly +immediate +describe +commerce +stare +##ster +indonesia +meat +rounds +boats +baker +orthodox +depression +formally +worn +naked +claire +muttered +sentence +11th +emily +document +77 +criticism +wished +vessel +spiritual +bent +virgin +parker +minimum +murray +lunch +danny +printed +compilation +keyboards +false +blow +belonged +68 +raising +78 +cutting +##board +pittsburgh +##up +9th +shadows +81 +hated +indigenous +jon +15th +barry +scholar +ah +##zer +oliver +##gy +stick +susan +meetings +attracted +spell +romantic +##ver +ye +1895 +photo +demanded +customers +##ac +1896 +logan +revival +keys +modified +commanded +jeans +##ious +upset +raw +phil +detective +hiding +resident +vincent +##bly +experiences +diamond +defeating +coverage +lucas +external +parks +franchise +helen +bible +successor +percussion +celebrated +il +lift +profile +clan +romania +##ied +mills +##su +nobody +achievement +shrugged +fault +1897 +rhythm +initiative +breakfast +carbon +700 +69 +lasted +violent +74 +wound +ken +killer +gradually +filmed +°c +dollars +processing +94 +remove +criticized +guests +sang +chemistry +##vin +legislature +disney +##bridge +uniform +escaped +integrated +proposal +purple +denied +liquid +karl +influential +morris +nights +stones +intense +experimental +twisted +71 +84 +##ld +pace +nazi +mitchell +ny +blind +reporter +newspapers +14th +centers 
+burn +basin +forgotten +surviving +filed +collections +monastery +losses +manual +couch +description +appropriate +merely +tag +missions +sebastian +restoration +replacing +triple +73 +elder +julia +warriors +benjamin +julian +convinced +stronger +amazing +declined +versus +merchant +happens +output +finland +bare +barbara +absence +ignored +dawn +injuries +##port +producers +##ram +82 +luis +##ities +kw +admit +expensive +electricity +nba +exception +symbol +##ving +ladies +shower +sheriff +characteristics +##je +aimed +button +ratio +effectively +summit +angle +jury +bears +foster +vessels +pants +executed +evans +dozen +advertising +kicked +patrol +1889 +competitions +lifetime +principles +athletics +##logy +birmingham +sponsored +89 +rob +nomination +1893 +acoustic +##sm +creature +longest +##tra +credits +harbor +dust +josh +##so +territories +milk +infrastructure +completion +thailand +indians +leon +archbishop +##sy +assist +pitch +blake +arrangement +girlfriend +serbian +operational +hence +sad +scent +fur +dj +sessions +hp +refer +rarely +##ora +exists +1892 +##ten +scientists +dirty +penalty +burst +portrait +seed +79 +pole +limits +rival +1894 +stable +alpha +grave +constitutional +alcohol +arrest +flower +mystery +devil +architectural +relationships +greatly +habitat +##istic +larry +progressive +remote +cotton +##ics +##ok +preserved +reaches +##ming +cited +86 +vast +scholarship +decisions +cbs +joy +teach +1885 +editions +knocked +eve +searching +partly +participation +gap +animated +fate +excellent +##ett +na +87 +alternate +saints +youngest +##ily +climbed +##ita +##tors +suggest +##ct +discussion +staying +choir +lakes +jacket +revenue +nevertheless +peaked +instrument +wondering +annually +managing +neil +1891 +signing +terry +##ice +apply +clinical +brooklyn +aim +catherine +fuck +farmers +figured +ninth +pride +hugh +evolution +ordinary +involvement +comfortable +shouted +tech +encouraged +taiwan +representation +sharing +##lia +##em +panic +exact +cargo +competing +fat +cried +83 +1920s +occasions +pa +cabin +borders +utah +marcus +##isation +badly +muscles +##ance +victorian +transition +warner +bet +permission +##rin +slave +terrible +similarly +shares +seth +uefa +possession +medals +benefits +colleges +lowered +perfectly +mall +transit +##ye +##kar +publisher +##ened +harrison +deaths +elevation +##ae +asleep +machines +sigh +ash +hardly +argument +occasion +parent +leo +decline +1888 +contribution +##ua +concentration +1000 +opportunities +hispanic +guardian +extent +emotions +hips +mason +volumes +bloody +controversy +diameter +steady +mistake +phoenix +identify +violin +##sk +departure +richmond +spin +funeral +enemies +1864 +gear +literally +connor +random +sergeant +grab +confusion +1865 +transmission +informed +op +leaning +sacred +suspended +thinks +gates +portland +luck +agencies +yours +hull +expert +muscle +layer +practical +sculpture +jerusalem +latest +lloyd +statistics +deeper +recommended +warrior +arkansas +mess +supports +greg +eagle +1880 +recovered +rated +concerts +rushed +##ano +stops +eggs +files +premiere +keith +##vo +delhi +turner +pit +affair +belief +paint +##zing +mate +##ach +##ev +victim +##ology +withdrew +bonus +styles +fled +##ud +glasgow +technologies +funded +nbc +adaptation +##ata +portrayed +cooperation +supporters +judges +bernard +justin +hallway +ralph +##ick +graduating +controversial +distant +continental +spider +bite +##ho +recognize +intention +mixing +##ese +egyptian +bow +tourism +suppose +claiming +tiger +dominated 
+participants +vi +##ru +nurse +partially +tape +##rum +psychology +##rn +essential +touring +duo +voting +civilian +emotional +channels +##king +apparent +hebrew +1887 +tommy +carrier +intersection +beast +hudson +##gar +##zo +lab +nova +bench +discuss +costa +##ered +detailed +behalf +drivers +unfortunately +obtain +##lis +rocky +##dae +siege +friendship +honey +##rian +1861 +amy +hang +posted +governments +collins +respond +wildlife +preferred +operator +##po +laura +pregnant +videos +dennis +suspected +boots +instantly +weird +automatic +businessman +alleged +placing +throwing +ph +mood +1862 +perry +venue +jet +remainder +##lli +##ci +passion +biological +boyfriend +1863 +dirt +buffalo +ron +segment +fa +abuse +##era +genre +thrown +stroke +colored +stress +exercise +displayed +##gen +struggled +##tti +abroad +dramatic +wonderful +thereafter +madrid +component +widespread +##sed +tale +citizen +todd +monday +1886 +vancouver +overseas +forcing +crying +descent +##ris +discussed +substantial +ranks +regime +1870 +provinces +switch +drum +zane +ted +tribes +proof +lp +cream +researchers +volunteer +manor +silk +milan +donated +allies +venture +principle +delivery +enterprise +##ves +##ans +bars +traditionally +witch +reminded +copper +##uk +pete +inter +links +colin +grinned +elsewhere +competitive +frequent +##oy +scream +##hu +tension +texts +submarine +finnish +defending +defend +pat +detail +1884 +affiliated +stuart +themes +villa +periods +tool +belgian +ruling +crimes +answers +folded +licensed +resort +demolished +hans +lucy +1881 +lion +traded +photographs +writes +craig +##fa +trials +generated +beth +noble +debt +percentage +yorkshire +erected +ss +viewed +grades +confidence +ceased +islam +telephone +retail +##ible +chile +m² +roberts +sixteen +##ich +commented +hampshire +innocent +dual +pounds +checked +regulations +afghanistan +sung +rico +liberty +assets +bigger +options +angels +relegated +tribute +wells +attending +leaf +##yan +butler +romanian +forum +monthly +lisa +patterns +gmina +##tory +madison +hurricane +rev +##ians +bristol +##ula +elite +valuable +disaster +democracy +awareness +germans +freyja +##ins +loop +absolutely +paying +populations +maine +sole +prayer +spencer +releases +doorway +bull +##ani +lover +midnight +conclusion +##sson +thirteen +lily +mediterranean +##lt +nhl +proud +sample +##hill +drummer +guinea +##ova +murphy +climb +##ston +instant +attributed +horn +ain +railways +steven +##ao +autumn +ferry +opponent +root +traveling +secured +corridor +stretched +tales +sheet +trinity +cattle +helps +indicates +manhattan +murdered +fitted +1882 +gentle +grandmother +mines +shocked +vegas +produces +##light +caribbean +##ou +belong +continuous +desperate +drunk +historically +trio +waved +raf +dealing +nathan +bat +murmured +interrupted +residing +scientist +pioneer +harold +aaron +##net +delta +attempting +minority +mini +believes +chorus +tend +lots +eyed +indoor +load +shots +updated +jail +##llo +concerning +connecting +wealth +##ved +slaves +arrive +rangers +sufficient +rebuilt +##wick +cardinal +flood +muhammad +whenever +relation +runners +moral +repair +viewers +arriving +revenge +punk +assisted +bath +fairly +breathe +lists +innings +illustrated +whisper +nearest +voters +clinton +ties +ultimate +screamed +beijing +lions +andre +fictional +gathering +comfort +radar +suitable +dismissed +hms +ban +pine +wrist +atmosphere +voivodeship +bid +timber +##ned +##nan +giants +##ane +cameron +recovery +uss +identical +categories +switched +serbia 
+laughter +noah +ensemble +therapy +peoples +touching +##off +locally +pearl +platforms +everywhere +ballet +tables +lanka +herbert +outdoor +toured +derek +1883 +spaces +contested +swept +1878 +exclusive +slight +connections +##dra +winds +prisoner +collective +bangladesh +tube +publicly +wealthy +thai +##ys +isolated +select +##ric +insisted +pen +fortune +ticket +spotted +reportedly +animation +enforcement +tanks +110 +decides +wider +lowest +owen +##time +nod +hitting +##hn +gregory +furthermore +magazines +fighters +solutions +##ery +pointing +requested +peru +reed +chancellor +knights +mask +worker +eldest +flames +reduction +1860 +volunteers +##tis +reporting +##hl +wire +advisory +endemic +origins +settlers +pursue +knock +consumer +1876 +eu +compound +creatures +mansion +sentenced +ivan +deployed +guitars +frowned +involves +mechanism +kilometers +perspective +shops +maps +terminus +duncan +alien +fist +bridges +##pers +heroes +fed +derby +swallowed +##ros +patent +sara +illness +characterized +adventures +slide +hawaii +jurisdiction +##op +organised +##side +adelaide +walks +biology +se +##ties +rogers +swing +tightly +boundaries +##rie +prepare +implementation +stolen +##sha +certified +colombia +edwards +garage +##mm +recalled +##ball +rage +harm +nigeria +breast +##ren +furniture +pupils +settle +##lus +cuba +balls +client +alaska +21st +linear +thrust +celebration +latino +genetic +terror +##cia +##ening +lightning +fee +witness +lodge +establishing +skull +##ique +earning +hood +##ei +rebellion +wang +sporting +warned +missile +devoted +activist +porch +worship +fourteen +package +1871 +decorated +##shire +housed +##ock +chess +sailed +doctors +oscar +joan +treat +garcia +harbour +jeremy +##ire +traditions +dominant +jacques +##gon +##wan +relocated +1879 +amendment +sized +companion +simultaneously +volleyball +spun +acre +increases +stopping +loves +belongs +affect +drafted +tossed +scout +battles +1875 +filming +shoved +munich +tenure +vertical +romance +pc +##cher +argue +##ical +craft +ranging +www +opens +honest +tyler +yesterday +virtual +##let +muslims +reveal +snake +immigrants +radical +screaming +speakers +firing +saving +belonging +ease +lighting +prefecture +blame +farmer +hungry +grows +rubbed +beam +sur +subsidiary +##cha +armenian +sao +dropping +conventional +##fer +microsoft +reply +qualify +spots +1867 +sweat +festivals +##ken +immigration +physician +discover +exposure +sandy +explanation +isaac +implemented +##fish +hart +initiated +connect +stakes +presents +heights +householder +pleased +tourist +regardless +slip +closest +##ction +surely +sultan +brings +riley +preparation +aboard +slammed +baptist +experiment +ongoing +interstate +organic +playoffs +##ika +1877 +130 +##tar +hindu +error +tours +tier +plenty +arrangements +talks +trapped +excited +sank +ho +athens +1872 +denver +welfare +suburb +athletes +trick +diverse +belly +exclusively +yelled +1868 +##med +conversion +##ette +1874 +internationally +computers +conductor +abilities +sensitive +hello +dispute +measured +globe +rocket +prices +amsterdam +flights +tigers +inn +municipalities +emotion +references +3d +##mus +explains +airlines +manufactured +pm +archaeological +1873 +interpretation +devon +comment +##ites +settlements +kissing +absolute +improvement +suite +impressed +barcelona +sullivan +jefferson +towers +jesse +julie +##tin +##lu +grandson +hi +gauge +regard +rings +interviews +trace +raymond +thumb +departments +burns +serial +bulgarian +scores +demonstrated +##ix +1866 +kyle 
+alberta +underneath +romanized +##ward +relieved +acquisition +phrase +cliff +reveals +han +cuts +merger +custom +##dar +nee +gilbert +graduation +##nts +assessment +cafe +difficulty +demands +swung +democrat +jennifer +commons +1940s +grove +##yo +completing +focuses +sum +substitute +bearing +stretch +reception +##py +reflected +essentially +destination +pairs +##ched +survival +resource +##bach +promoting +doubles +messages +tear +##down +##fully +parade +florence +harvey +incumbent +partial +framework +900 +pedro +frozen +procedure +olivia +controls +##mic +shelter +personally +temperatures +##od +brisbane +tested +sits +marble +comprehensive +oxygen +leonard +##kov +inaugural +iranian +referring +quarters +attitude +##ivity +mainstream +lined +mars +dakota +norfolk +unsuccessful +##° +explosion +helicopter +congressional +##sing +inspector +bitch +seal +departed +divine +##ters +coaching +examination +punishment +manufacturer +sink +columns +unincorporated +signals +nevada +squeezed +dylan +dining +photos +martial +manuel +eighteen +elevator +brushed +plates +ministers +ivy +congregation +##len +slept +specialized +taxes +curve +restricted +negotiations +likes +statistical +arnold +inspiration +execution +bold +intermediate +significance +margin +ruler +wheels +gothic +intellectual +dependent +listened +eligible +buses +widow +syria +earn +cincinnati +collapsed +recipient +secrets +accessible +philippine +maritime +goddess +clerk +surrender +breaks +playoff +database +##ified +##lon +ideal +beetle +aspect +soap +regulation +strings +expand +anglo +shorter +crosses +retreat +tough +coins +wallace +directions +pressing +##oon +shipping +locomotives +comparison +topics +nephew +##mes +distinction +honors +travelled +sierra +ibn +##over +fortress +sa +recognised +carved +1869 +clients +##dan +intent +##mar +coaches +describing +bread +##ington +beaten +northwestern +##ona +merit +youtube +collapse +challenges +em +historians +objective +submitted +virus +attacking +drake +assume +##ere +diseases +marc +stem +leeds +##cus +##ab +farming +glasses +##lock +visits +nowhere +fellowship +relevant +carries +restaurants +experiments +101 +constantly +bases +targets +shah +tenth +opponents +verse +territorial +##ira +writings +corruption +##hs +instruction +inherited +reverse +emphasis +##vic +employee +arch +keeps +rabbi +watson +payment +uh +##ala +nancy +##tre +venice +fastest +sexy +banned +adrian +properly +ruth +touchdown +dollar +boards +metre +circles +edges +favour +comments +ok +travels +liberation +scattered +firmly +##ular +holland +permitted +diesel +kenya +den +originated +##ral +demons +resumed +dragged +rider +##rus +servant +blinked +extend +torn +##ias +##sey +input +meal +everybody +cylinder +kinds +camps +##fe +bullet +logic +##wn +croatian +evolved +healthy +fool +chocolate +wise +preserve +pradesh +##ess +respective +1850 +##ew +chicken +artificial +gross +corresponding +convicted +cage +caroline +dialogue +##dor +narrative +stranger +mario +br +christianity +failing +trent +commanding +buddhist +1848 +maurice +focusing +yale +bike +altitude +##ering +mouse +revised +##sley +veteran +##ig +pulls +theology +crashed +campaigns +legion +##ability +drag +excellence +customer +cancelled +intensity +excuse +##lar +liga +participating +contributing +printing +##burn +variable +##rk +curious +bin +legacy +renaissance +##my +symptoms +binding +vocalist +dancer +##nie +grammar +gospel +democrats +ya +enters +sc +diplomatic +hitler +##ser +clouds +mathematical +quit +defended +oriented 
+##heim +fundamental +hardware +impressive +equally +convince +confederate +guilt +chuck +sliding +##ware +magnetic +narrowed +petersburg +bulgaria +otto +phd +skill +##ama +reader +hopes +pitcher +reservoir +hearts +automatically +expecting +mysterious +bennett +extensively +imagined +seeds +monitor +fix +##ative +journalism +struggling +signature +ranch +encounter +photographer +observation +protests +##pin +influences +##hr +calendar +##all +cruz +croatia +locomotive +hughes +naturally +shakespeare +basement +hook +uncredited +faded +theories +approaches +dare +phillips +filling +fury +obama +##ain +efficient +arc +deliver +min +raid +breeding +inducted +leagues +efficiency +axis +montana +eagles +##ked +supplied +instructions +karen +picking +indicating +trap +anchor +practically +christians +tomb +vary +occasional +electronics +lords +readers +newcastle +faint +innovation +collect +situations +engagement +160 +claude +mixture +##feld +peer +tissue +logo +lean +##ration +°f +floors +##ven +architects +reducing +##our +##ments +rope +1859 +ottawa +##har +samples +banking +declaration +proteins +resignation +francois +saudi +advocate +exhibited +armor +twins +divorce +##ras +abraham +reviewed +jo +temporarily +matrix +physically +pulse +curled +##ena +difficulties +bengal +usage +##ban +annie +riders +certificate +##pi +holes +warsaw +distinctive +jessica +##mon +mutual +1857 +customs +circular +eugene +removal +loaded +mere +vulnerable +depicted +generations +dame +heir +enormous +lightly +climbing +pitched +lessons +pilots +nepal +ram +google +preparing +brad +louise +renowned +##₂ +liam +##ably +plaza +shaw +sophie +brilliant +bills +##bar +##nik +fucking +mainland +server +pleasant +seized +veterans +jerked +fail +beta +brush +radiation +stored +warmth +southeastern +nate +sin +raced +berkeley +joke +athlete +designation +trunk +##low +roland +qualification +archives +heels +artwork +receives +judicial +reserves +##bed +woke +installation +abu +floating +fake +lesser +excitement +interface +concentrated +addressed +characteristic +amanda +saxophone +monk +auto +##bus +releasing +egg +dies +interaction +defender +ce +outbreak +glory +loving +##bert +sequel +consciousness +http +awake +ski +enrolled +##ress +handling +rookie +brow +somebody +biography +warfare +amounts +contracts +presentation +fabric +dissolved +challenged +meter +psychological +lt +elevated +rally +accurate +##tha +hospitals +undergraduate +specialist +venezuela +exhibit +shed +nursing +protestant +fluid +structural +footage +jared +consistent +prey +##ska +succession +reflect +exile +lebanon +wiped +suspect +shanghai +resting +integration +preservation +marvel +variant +pirates +sheep +rounded +capita +sailing +colonies +manuscript +deemed +variations +clarke +functional +emerging +boxing +relaxed +curse +azerbaijan +heavyweight +nickname +editorial +rang +grid +tightened +earthquake +flashed +miguel +rushing +##ches +improvements +boxes +brooks +180 +consumption +molecular +felix +societies +repeatedly +variation +aids +civic +graphics +professionals +realm +autonomous +receiver +delayed +workshop +militia +chairs +trump +canyon +##point +harsh +extending +lovely +happiness +##jan +stake +eyebrows +embassy +wellington +hannah +##ella +sony +corners +bishops +swear +cloth +contents +xi +namely +commenced +1854 +stanford +nashville +courage +graphic +commitment +garrison +##bin +hamlet +clearing +rebels +attraction +literacy +cooking +ruins +temples +jenny +humanity +celebrate +hasn +freight +sixty +rebel +bastard 
+##art +newton +##ada +deer +##ges +##ching +smiles +delaware +singers +##ets +approaching +assists +flame +##ph +boulevard +barrel +planted +##ome +pursuit +##sia +consequences +posts +shallow +invitation +rode +depot +ernest +kane +rod +concepts +preston +topic +chambers +striking +blast +arrives +descendants +montgomery +ranges +worlds +##lay +##ari +span +chaos +praise +##ag +fewer +1855 +sanctuary +mud +fbi +##ions +programmes +maintaining +unity +harper +bore +handsome +closure +tournaments +thunder +nebraska +linda +facade +puts +satisfied +argentine +dale +cork +dome +panama +##yl +1858 +tasks +experts +##ates +feeding +equation +##las +##ida +##tu +engage +bryan +##ax +um +quartet +melody +disbanded +sheffield +blocked +gasped +delay +kisses +maggie +connects +##non +sts +poured +creator +publishers +##we +guided +ellis +extinct +hug +gaining +##ord +complicated +##bility +poll +clenched +investigate +##use +thereby +quantum +spine +cdp +humor +kills +administered +semifinals +##du +encountered +ignore +##bu +commentary +##maker +bother +roosevelt +140 +plains +halfway +flowing +cultures +crack +imprisoned +neighboring +airline +##ses +##view +##mate +##ec +gather +wolves +marathon +transformed +##ill +cruise +organisations +carol +punch +exhibitions +numbered +alarm +ratings +daddy +silently +##stein +queens +colours +impression +guidance +liu +tactical +##rat +marshal +della +arrow +##ings +rested +feared +tender +owns +bitter +advisor +escort +##ides +spare +farms +grants +##ene +dragons +encourage +colleagues +cameras +##und +sucked +pile +spirits +prague +statements +suspension +landmark +fence +torture +recreation +bags +permanently +survivors +pond +spy +predecessor +bombing +coup +##og +protecting +transformation +glow +##lands +##book +dug +priests +andrea +feat +barn +jumping +##chen +##ologist +##con +casualties +stern +auckland +pipe +serie +revealing +ba +##bel +trevor +mercy +spectrum +yang +consist +governing +collaborated +possessed +epic +comprises +blew +shane +##ack +lopez +honored +magical +sacrifice +judgment +perceived +hammer +mtv +baronet +tune +das +missionary +sheets +350 +neutral +oral +threatening +attractive +shade +aims +seminary +##master +estates +1856 +michel +wounds +refugees +manufacturers +##nic +mercury +syndrome +porter +##iya +##din +hamburg +identification +upstairs +purse +widened +pause +cared +breathed +affiliate +santiago +prevented +celtic +fisher +125 +recruited +byzantine +reconstruction +farther +##mp +diet +sake +au +spite +sensation +##ert +blank +separation +105 +##hon +vladimir +armies +anime +##lie +accommodate +orbit +cult +sofia +archive +##ify +##box +founders +sustained +disorder +honours +northeastern +mia +crops +violet +threats +blanket +fires +canton +followers +southwestern +prototype +voyage +assignment +altered +moderate +protocol +pistol +##eo +questioned +brass +lifting +1852 +math +authored +##ual +doug +dimensional +dynamic +##san +1851 +pronounced +grateful +quest +uncomfortable +boom +presidency +stevens +relating +politicians +chen +barrier +quinn +diana +mosque +tribal +cheese +palmer +portions +sometime +chester +treasure +wu +bend +download +millions +reforms +registration +##osa +consequently +monitoring +ate +preliminary +brandon +invented +ps +eaten +exterior +intervention +ports +documented +log +displays +lecture +sally +favourite +##itz +vermont +lo +invisible +isle +breed +##ator +journalists +relay +speaks +backward +explore +midfielder +actively +stefan +procedures +cannon +blond +kenneth +centered 
+servants +chains +libraries +malcolm +essex +henri +slavery +##hal +facts +fairy +coached +cassie +cats +washed +cop +##fi +announcement +item +2000s +vinyl +activated +marco +frontier +growled +curriculum +##das +loyal +accomplished +leslie +ritual +kenny +##00 +vii +napoleon +hollow +hybrid +jungle +stationed +friedrich +counted +##ulated +platinum +theatrical +seated +col +rubber +glen +1840 +diversity +healing +extends +id +provisions +administrator +columbus +##oe +tributary +te +assured +org +##uous +prestigious +examined +lectures +grammy +ronald +associations +bailey +allan +essays +flute +believing +consultant +proceedings +travelling +1853 +kit +kerala +yugoslavia +buddy +methodist +##ith +burial +centres +batman +##nda +discontinued +bo +dock +stockholm +lungs +severely +##nk +citing +manga +##ugh +steal +mumbai +iraqi +robot +celebrity +bride +broadcasts +abolished +pot +joel +overhead +franz +packed +reconnaissance +johann +acknowledged +introduce +handled +doctorate +developments +drinks +alley +palestine +##nis +##aki +proceeded +recover +bradley +grain +patch +afford +infection +nationalist +legendary +##ath +interchange +virtually +gen +gravity +exploration +amber +vital +wishes +powell +doctrine +elbow +screenplay +##bird +contribute +indonesian +pet +creates +##com +enzyme +kylie +discipline +drops +manila +hunger +##ien +layers +suffer +fever +bits +monica +keyboard +manages +##hood +searched +appeals +##bad +testament +grande +reid +##war +beliefs +congo +##ification +##dia +si +requiring +##via +casey +1849 +regret +streak +rape +depends +syrian +sprint +pound +tourists +upcoming +pub +##xi +tense +##els +practiced +echo +nationwide +guild +motorcycle +liz +##zar +chiefs +desired +elena +bye +precious +absorbed +relatives +booth +pianist +##mal +citizenship +exhausted +wilhelm +##ceae +##hed +noting +quarterback +urge +hectares +##gue +ace +holly +##tal +blonde +davies +parked +sustainable +stepping +twentieth +airfield +galaxy +nest +chip +##nell +tan +shaft +paulo +requirement +##zy +paradise +tobacco +trans +renewed +vietnamese +##cker +##ju +suggesting +catching +holmes +enjoying +md +trips +colt +holder +butterfly +nerve +reformed +cherry +bowling +trailer +carriage +goodbye +appreciate +toy +joshua +interactive +enabled +involve +##kan +collar +determination +bunch +facebook +recall +shorts +superintendent +episcopal +frustration +giovanni +nineteenth +laser +privately +array +circulation +##ovic +armstrong +deals +painful +permit +discrimination +##wi +aires +retiring +cottage +ni +##sta +horizon +ellen +jamaica +ripped +fernando +chapters +playstation +patron +lecturer +navigation +behaviour +genes +georgian +export +solomon +rivals +swift +seventeen +rodriguez +princeton +independently +sox +1847 +arguing +entity +casting +hank +criteria +oakland +geographic +milwaukee +reflection +expanding +conquest +dubbed +##tv +halt +brave +brunswick +doi +arched +curtis +divorced +predominantly +somerset +streams +ugly +zoo +horrible +curved +buenos +fierce +dictionary +vector +theological +unions +handful +stability +chan +punjab +segments +##lly +altar +ignoring +gesture +monsters +pastor +##stone +thighs +unexpected +operators +abruptly +coin +compiled +associates +improving +migration +pin +##ose +compact +collegiate +reserved +##urs +quarterfinals +roster +restore +assembled +hurry +oval +##cies +1846 +flags +martha +##del +victories +sharply +##rated +argues +deadly +neo +drawings +symbols +performer +##iel +griffin +restrictions +editing +andrews +java +journals 
+arabia +compositions +dee +pierce +removing +hindi +casino +runway +civilians +minds +nasa +hotels +##zation +refuge +rent +retain +potentially +conferences +suburban +conducting +##tto +##tions +##tle +descended +massacre +##cal +ammunition +terrain +fork +souls +counts +chelsea +durham +drives +cab +##bank +perth +realizing +palestinian +finn +simpson +##dal +betty +##ule +moreover +particles +cardinals +tent +evaluation +extraordinary +##oid +inscription +##works +wednesday +chloe +maintains +panels +ashley +trucks +##nation +cluster +sunlight +strikes +zhang +##wing +dialect +canon +##ap +tucked +##ws +collecting +##mas +##can +##sville +maker +quoted +evan +franco +aria +buying +cleaning +eva +closet +provision +apollo +clinic +rat +##ez +necessarily +ac +##gle +##ising +venues +flipped +cent +spreading +trustees +checking +authorized +##sco +disappointed +##ado +notion +duration +trumpet +hesitated +topped +brussels +rolls +theoretical +hint +define +aggressive +repeat +wash +peaceful +optical +width +allegedly +mcdonald +strict +copyright +##illa +investors +mar +jam +witnesses +sounding +miranda +michelle +privacy +hugo +harmony +##pp +valid +lynn +glared +nina +102 +headquartered +diving +boarding +gibson +##ncy +albanian +marsh +routine +dealt +enhanced +er +intelligent +substance +targeted +enlisted +discovers +spinning +observations +pissed +smoking +rebecca +capitol +visa +varied +costume +seemingly +indies +compensation +surgeon +thursday +arsenal +westminster +suburbs +rid +anglican +##ridge +knots +foods +alumni +lighter +fraser +whoever +portal +scandal +##ray +gavin +advised +instructor +flooding +terrorist +##ale +teenage +interim +senses +duck +teen +thesis +abby +eager +overcome +##ile +newport +glenn +rises +shame +##cc +prompted +priority +forgot +bomber +nicolas +protective +360 +cartoon +katherine +breeze +lonely +trusted +henderson +richardson +relax +banner +candy +palms +remarkable +##rio +legends +cricketer +essay +ordained +edmund +rifles +trigger +##uri +##away +sail +alert +1830 +audiences +penn +sussex +siblings +pursued +indianapolis +resist +rosa +consequence +succeed +avoided +1845 +##ulation +inland +##tie +##nna +counsel +profession +chronicle +hurried +##una +eyebrow +eventual +bleeding +innovative +cure +##dom +committees +accounting +con +scope +hardy +heather +tenor +gut +herald +codes +tore +scales +wagon +##oo +luxury +tin +prefer +fountain +triangle +bonds +darling +convoy +dried +traced +beings +troy +accidentally +slam +findings +smelled +joey +lawyers +outcome +steep +bosnia +configuration +shifting +toll +brook +performers +lobby +philosophical +construct +shrine +aggregate +boot +cox +phenomenon +savage +insane +solely +reynolds +lifestyle +##ima +nationally +holdings +consideration +enable +edgar +mo +mama +##tein +fights +relegation +chances +atomic +hub +conjunction +awkward +reactions +currency +finale +kumar +underwent +steering +elaborate +gifts +comprising +melissa +veins +reasonable +sunshine +chi +solve +trails +inhabited +elimination +ethics +huh +ana +molly +consent +apartments +layout +marines +##ces +hunters +bulk +##oma +hometown +##wall +##mont +cracked +reads +neighbouring +withdrawn +admission +wingspan +damned +anthology +lancashire +brands +batting +forgive +cuban +awful +##lyn +104 +dimensions +imagination +##ade +dante +##ship +tracking +desperately +goalkeeper +##yne +groaned +workshops +confident +burton +gerald +milton +circus +uncertain +slope +copenhagen +sophia +fog +philosopher +portraits +accent +cycling 
+varying +gripped +larvae +garrett +specified +scotia +mature +luther +kurt +rap +##kes +aerial +750 +ferdinand +heated +es +transported +##shan +safely +nonetheless +##orn +##gal +motors +demanding +##sburg +startled +##brook +ally +generate +caps +ghana +stained +demo +mentions +beds +ap +afterward +diary +##bling +utility +##iro +richards +1837 +conspiracy +conscious +shining +footsteps +observer +cyprus +urged +loyalty +developer +probability +olive +upgraded +gym +miracle +insects +graves +1844 +ourselves +hydrogen +amazon +katie +tickets +poets +##pm +planes +##pan +prevention +witnessed +dense +jin +randy +tang +warehouse +monroe +bang +archived +elderly +investigations +alec +granite +mineral +conflicts +controlling +aboriginal +carlo +##zu +mechanics +stan +stark +rhode +skirt +est +##berry +bombs +respected +##horn +imposed +limestone +deny +nominee +memphis +grabbing +disabled +##als +amusement +aa +frankfurt +corn +referendum +varies +slowed +disk +firms +unconscious +incredible +clue +sue +##zhou +twist +##cio +joins +idaho +chad +developers +computing +destroyer +103 +mortal +tucker +kingston +choices +yu +carson +1800 +os +whitney +geneva +pretend +dimension +staged +plateau +maya +##une +freestyle +##bc +rovers +hiv +##ids +tristan +classroom +prospect +##hus +honestly +diploma +lied +thermal +auxiliary +feast +unlikely +iata +##tel +morocco +pounding +treasury +lithuania +considerably +1841 +dish +1812 +geological +matching +stumbled +destroying +marched +brien +advances +cake +nicole +belle +settling +measuring +directing +##mie +tuesday +bassist +capabilities +stunned +fraud +torpedo +##list +##phone +anton +wisdom +surveillance +ruined +##ulate +lawsuit +healthcare +theorem +halls +trend +aka +horizontal +dozens +acquire +lasting +swim +hawk +gorgeous +fees +vicinity +decrease +adoption +tactics +##ography +pakistani +##ole +draws +##hall +willie +burke +heath +algorithm +integral +powder +elliott +brigadier +jackie +tate +varieties +darker +##cho +lately +cigarette +specimens +adds +##ree +##ensis +##inger +exploded +finalist +cia +murders +wilderness +arguments +nicknamed +acceptance +onwards +manufacture +robertson +jets +tampa +enterprises +blog +loudly +composers +nominations +1838 +ai +malta +inquiry +automobile +hosting +viii +rays +tilted +grief +museums +strategies +furious +euro +equality +cohen +poison +surrey +wireless +governed +ridiculous +moses +##esh +##room +vanished +##ito +barnes +attract +morrison +istanbul +##iness +absent +rotation +petition +janet +##logical +satisfaction +custody +deliberately +observatory +comedian +surfaces +pinyin +novelist +strictly +canterbury +oslo +monks +embrace +ibm +jealous +photograph +continent +dorothy +marina +doc +excess +holden +allegations +explaining +stack +avoiding +lance +storyline +majesty +poorly +spike +dos +bradford +raven +travis +classics +proven +voltage +pillow +fists +butt +1842 +interpreted +##car +1839 +gage +telegraph +lens +promising +expelled +casual +collector +zones +##min +silly +nintendo +##kh +##bra +downstairs +chef +suspicious +afl +flies +vacant +uganda +pregnancy +condemned +lutheran +estimates +cheap +decree +saxon +proximity +stripped +idiot +deposits +contrary +presenter +magnus +glacier +im +offense +edwin +##ori +upright +##long +bolt +##ois +toss +geographical +##izes +environments +delicate +marking +abstract +xavier +nails +windsor +plantation +occurring +equity +saskatchewan +fears +drifted +sequences +vegetation +revolt +##stic +1843 +sooner +fusion +opposing +nato +skating 
+1836 +secretly +ruin +lease +##oc +edit +##nne +flora +anxiety +ruby +##ological +##mia +tel +bout +taxi +emmy +frost +rainbow +compounds +foundations +rainfall +assassination +nightmare +dominican +##win +achievements +deserve +orlando +intact +armenia +##nte +calgary +valentine +106 +marion +proclaimed +theodore +bells +courtyard +thigh +gonzalez +console +troop +minimal +monte +everyday +##ence +##if +supporter +terrorism +buck +openly +presbyterian +activists +carpet +##iers +rubbing +uprising +##yi +cute +conceived +legally +##cht +millennium +cello +velocity +ji +rescued +cardiff +1835 +rex +concentrate +senators +beard +rendered +glowing +battalions +scouts +competitors +sculptor +catalogue +arctic +ion +raja +bicycle +wow +glancing +lawn +##woman +gentleman +lighthouse +publish +predicted +calculated +##val +variants +##gne +strain +##ui +winston +deceased +##nus +touchdowns +brady +caleb +sinking +echoed +crush +hon +blessed +protagonist +hayes +endangered +magnitude +editors +##tine +estimate +responsibilities +##mel +backup +laying +consumed +sealed +zurich +lovers +frustrated +##eau +ahmed +kicking +mit +treasurer +1832 +biblical +refuse +terrified +pump +agrees +genuine +imprisonment +refuses +plymouth +##hen +lou +##nen +tara +trembling +antarctic +ton +learns +##tas +crap +crucial +faction +atop +##borough +wrap +lancaster +odds +hopkins +erik +lyon +##eon +bros +##ode +snap +locality +tips +empress +crowned +cal +acclaimed +chuckled +##ory +clara +sends +mild +towel +##fl +##day +##а +wishing +assuming +interviewed +##bal +##die +interactions +eden +cups +helena +##lf +indie +beck +##fire +batteries +filipino +wizard +parted +##lam +traces +##born +rows +idol +albany +delegates +##ees +##sar +discussions +##ex +notre +instructed +belgrade +highways +suggestion +lauren +possess +orientation +alexandria +abdul +beats +salary +reunion +ludwig +alright +wagner +intimate +pockets +slovenia +hugged +brighton +merchants +cruel +stole +trek +slopes +repairs +enrollment +politically +underlying +promotional +counting +boeing +##bb +isabella +naming +##и +keen +bacteria +listing +separately +belfast +ussr +450 +lithuanian +anybody +ribs +sphere +martinez +cock +embarrassed +proposals +fragments +nationals +##fs +##wski +premises +fin +1500 +alpine +matched +freely +bounded +jace +sleeve +##af +gaming +pier +populated +evident +##like +frances +flooded +##dle +frightened +pour +trainer +framed +visitor +challenging +pig +wickets +##fold +infected +email +##pes +arose +##aw +reward +ecuador +oblast +vale +ch +shuttle +##usa +bach +rankings +forbidden +cornwall +accordance +salem +consumers +bruno +fantastic +toes +machinery +resolved +julius +remembering +propaganda +iceland +bombardment +tide +contacts +wives +##rah +concerto +macdonald +albania +implement +daisy +tapped +sudan +helmet +angela +mistress +##lic +crop +sunk +finest +##craft +hostile +##ute +##tsu +boxer +fr +paths +adjusted +habit +ballot +supervision +soprano +##zen +bullets +wicked +sunset +regiments +disappear +lamp +performs +app +##gia +##oa +rabbit +digging +incidents +entries +##cion +dishes +##oi +introducing +##ati +##fied +freshman +slot +jill +tackles +baroque +backs +##iest +lone +sponsor +destiny +altogether +convert +##aro +consensus +shapes +demonstration +basically +feminist +auction +artifacts +##bing +strongest +twitter +halifax +2019 +allmusic +mighty +smallest +precise +alexandra +viola +##los +##ille +manuscripts +##illo +dancers +ari +managers +monuments +blades +barracks +springfield +maiden 
+consolidated +electron +##end +berry +airing +wheat +nobel +inclusion +blair +payments +geography +bee +cc +eleanor +react +##hurst +afc +manitoba +##yu +su +lineup +fitness +recreational +investments +airborne +disappointment +##dis +edmonton +viewing +##row +renovation +##cast +infant +bankruptcy +roses +aftermath +pavilion +##yer +carpenter +withdrawal +ladder +##hy +discussing +popped +reliable +agreements +rochester +##abad +curves +bombers +220 +rao +reverend +decreased +choosing +107 +stiff +consulting +naples +crawford +tracy +ka +ribbon +cops +##lee +crushed +deciding +unified +teenager +accepting +flagship +explorer +poles +sanchez +inspection +revived +skilled +induced +exchanged +flee +locals +tragedy +swallow +loading +hanna +demonstrate +##ela +salvador +flown +contestants +civilization +##ines +wanna +rhodes +fletcher +hector +knocking +considers +##ough +nash +mechanisms +sensed +mentally +walt +unclear +##eus +renovated +madame +##cks +crews +governmental +##hin +undertaken +monkey +##ben +##ato +fatal +armored +copa +caves +governance +grasp +perception +certification +froze +damp +tugged +wyoming +##rg +##ero +newman +##lor +nerves +curiosity +graph +115 +##ami +withdraw +tunnels +dull +meredith +moss +exhibits +neighbors +communicate +accuracy +explored +raiders +republicans +secular +kat +superman +penny +criticised +##tch +freed +update +conviction +wade +ham +likewise +delegation +gotta +doll +promises +technological +myth +nationality +resolve +convent +##mark +sharon +dig +sip +coordinator +entrepreneur +fold +##dine +capability +councillor +synonym +blown +swan +cursed +1815 +jonas +haired +sofa +canvas +keeper +rivalry +##hart +rapper +speedway +swords +postal +maxwell +estonia +potter +recurring +##nn +##ave +errors +##oni +cognitive +1834 +##² +claws +nadu +roberto +bce +wrestler +ellie +##ations +infinite +ink +##tia +presumably +finite +staircase +108 +noel +patricia +nacional +##cation +chill +eternal +tu +preventing +prussia +fossil +limbs +##logist +ernst +frog +perez +rene +##ace +pizza +prussian +##ios +##vy +molecules +regulatory +answering +opinions +sworn +lengths +supposedly +hypothesis +upward +habitats +seating +ancestors +drank +yield +hd +synthesis +researcher +modest +##var +mothers +peered +voluntary +homeland +##the +acclaim +##igan +static +valve +luxembourg +alto +carroll +fe +receptor +norton +ambulance +##tian +johnston +catholics +depicting +jointly +elephant +gloria +mentor +badge +ahmad +distinguish +remarked +councils +precisely +allison +advancing +detection +crowded +##10 +cooperative +ankle +mercedes +dagger +surrendered +pollution +commit +subway +jeffrey +lesson +sculptures +provider +##fication +membrane +timothy +rectangular +fiscal +heating +teammate +basket +particle +anonymous +deployment +##ple +missiles +courthouse +proportion +shoe +sec +##ller +complaints +forbes +blacks +abandon +remind +sizes +overwhelming +autobiography +natalie +##awa +risks +contestant +countryside +babies +scorer +invaded +enclosed +proceed +hurling +disorders +##cu +reflecting +continuously +cruiser +graduates +freeway +investigated +ore +deserved +maid +blocking +phillip +jorge +shakes +dove +mann +variables +lacked +burden +accompanying +que +consistently +organizing +provisional +complained +endless +##rm +tubes +juice +georges +krishna +mick +labels +thriller +##uch +laps +arcade +sage +snail +##table +shannon +fi +laurence +seoul +vacation +presenting +hire +churchill +surprisingly +prohibited +savannah +technically +##oli +170 +##lessly 
+testimony +suited +speeds +toys +romans +mlb +flowering +measurement +talented +kay +settings +charleston +expectations +shattered +achieving +triumph +ceremonies +portsmouth +lanes +mandatory +loser +stretching +cologne +realizes +seventy +cornell +careers +webb +##ulating +americas +budapest +ava +suspicion +##ison +yo +conrad +##hai +sterling +jessie +rector +##az +1831 +transform +organize +loans +christine +volcanic +warrant +slender +summers +subfamily +newer +danced +dynamics +rhine +proceeds +heinrich +gastropod +commands +sings +facilitate +easter +ra +positioned +responses +expense +fruits +yanked +imported +25th +velvet +vic +primitive +tribune +baldwin +neighbourhood +donna +rip +hay +pr +##uro +1814 +espn +welcomed +##aria +qualifier +glare +highland +timing +##cted +shells +eased +geometry +louder +exciting +slovakia +##sion +##iz +##lot +savings +prairie +##ques +marching +rafael +tonnes +##lled +curtain +preceding +shy +heal +greene +worthy +##pot +detachment +bury +sherman +##eck +reinforced +seeks +bottles +contracted +duchess +outfit +walsh +##sc +mickey +##ase +geoffrey +archer +squeeze +dawson +eliminate +invention +##enberg +neal +##eth +stance +dealer +coral +maple +retire +polo +simplified +##ht +1833 +hid +watts +backwards +jules +##oke +genesis +mt +frames +rebounds +burma +woodland +moist +santos +whispers +drained +subspecies +##aa +streaming +ulster +burnt +correspondence +maternal +gerard +denis +stealing +##load +genius +duchy +##oria +inaugurated +momentum +suits +placement +sovereign +clause +thames +##hara +confederation +reservation +sketch +yankees +lets +rotten +charm +hal +verses +ultra +commercially +dot +salon +citation +adopt +winnipeg +mist +allocated +cairo +##boy +jenkins +interference +objectives +##wind +1820 +portfolio +armoured +sectors +##eh +initiatives +##world +integrity +exercises +robe +tap +ab +gazed +##tones +distracted +rulers +111 +favorable +jerome +tended +cart +factories +##eri +diplomat +valued +gravel +charitable +##try +calvin +exploring +chang +shepherd +terrace +pdf +pupil +##ural +reflects +ups +##rch +governors +shelf +depths +##nberg +trailed +crest +tackle +##nian +##ats +hatred +##kai +clare +makers +ethiopia +longtime +detected +embedded +lacking +slapped +rely +thomson +anticipation +iso +morton +successive +agnes +screenwriter +straightened +philippe +playwright +haunted +licence +iris +intentions +sutton +112 +logical +correctly +##weight +branded +licked +tipped +silva +ricky +narrator +requests +##ents +greeted +supernatural +cow +##wald +lung +refusing +employer +strait +gaelic +liner +##piece +zoe +sabha +##mba +driveway +harvest +prints +bates +reluctantly +threshold +algebra +ira +wherever +coupled +240 +assumption +picks +##air +designers +raids +gentlemen +##ean +roller +blowing +leipzig +locks +screw +dressing +strand +##lings +scar +dwarf +depicts +##nu +nods +##mine +differ +boris +##eur +yuan +flip +##gie +mob +invested +questioning +applying +##ture +shout +##sel +gameplay +blamed +illustrations +bothered +weakness +rehabilitation +##of +##zes +envelope +rumors +miners +leicester +subtle +kerry +##ico +ferguson +##fu +premiership +ne +##cat +bengali +prof +catches +remnants +dana +##rily +shouting +presidents +baltic +ought +ghosts +dances +sailors +shirley +fancy +dominic +##bie +madonna +##rick +bark +buttons +gymnasium +ashes +liver +toby +oath +providence +doyle +evangelical +nixon +cement +carnegie +embarked +hatch +surroundings +guarantee +needing +pirate +essence +##bee +filter +crane +hammond 
+projected +immune +percy +twelfth +##ult +regent +doctoral +damon +mikhail +##ichi +lu +critically +elect +realised +abortion +acute +screening +mythology +steadily +##fc +frown +nottingham +kirk +wa +minneapolis +##rra +module +algeria +mc +nautical +encounters +surprising +statues +availability +shirts +pie +alma +brows +munster +mack +soup +crater +tornado +sanskrit +cedar +explosive +bordered +dixon +planets +stamp +exam +happily +##bble +carriers +kidnapped +##vis +accommodation +emigrated +##met +knockout +correspondent +violation +profits +peaks +lang +specimen +agenda +ancestry +pottery +spelling +equations +obtaining +ki +linking +1825 +debris +asylum +##20 +buddhism +teddy +##ants +gazette +##nger +##sse +dental +eligibility +utc +fathers +averaged +zimbabwe +francesco +coloured +hissed +translator +lynch +mandate +humanities +mackenzie +uniforms +lin +##iana +##gio +asset +mhz +fitting +samantha +genera +wei +rim +beloved +shark +riot +entities +expressions +indo +carmen +slipping +owing +abbot +neighbor +sidney +##av +rats +recommendations +encouraging +squadrons +anticipated +commanders +conquered +##oto +donations +diagnosed +##mond +divide +##iva +guessed +decoration +vernon +auditorium +revelation +conversations +##kers +##power +herzegovina +dash +alike +protested +lateral +herman +accredited +mg +##gent +freeman +mel +fiji +crow +crimson +##rine +livestock +##pped +humanitarian +bored +oz +whip +##lene +##ali +legitimate +alter +grinning +spelled +anxious +oriental +wesley +##nin +##hole +carnival +controller +detect +##ssa +bowed +educator +kosovo +macedonia +##sin +occupy +mastering +stephanie +janeiro +para +unaware +nurses +noon +135 +cam +hopefully +ranger +combine +sociology +polar +rica +##eer +neill +##sman +holocaust +##ip +doubled +lust +1828 +109 +decent +cooling +unveiled +##card +1829 +nsw +homer +chapman +meyer +##gin +dive +mae +reagan +expertise +##gled +darwin +brooke +sided +prosecution +investigating +comprised +petroleum +genres +reluctant +differently +trilogy +johns +vegetables +corpse +highlighted +lounge +pension +unsuccessfully +elegant +aided +ivory +beatles +amelia +cain +dubai +sunny +immigrant +babe +click +##nder +underwater +pepper +combining +mumbled +atlas +horns +accessed +ballad +physicians +homeless +gestured +rpm +freak +louisville +corporations +patriots +prizes +rational +warn +modes +decorative +overnight +din +troubled +phantom +##ort +monarch +sheer +##dorf +generals +guidelines +organs +addresses +##zon +enhance +curling +parishes +cord +##kie +linux +caesar +deutsche +bavaria +##bia +coleman +cyclone +##eria +bacon +petty +##yama +##old +hampton +diagnosis +1824 +throws +complexity +rita +disputed +##₃ +pablo +##sch +marketed +trafficking +##ulus +examine +plague +formats +##oh +vault +faithful +##bourne +webster +##ox +highlights +##ient +##ann +phones +vacuum +sandwich +modeling +##gated +bolivia +clergy +qualities +isabel +##nas +##ars +wears +screams +reunited +annoyed +bra +##ancy +##rate +differential +transmitter +tattoo +container +poker +##och +excessive +resides +cowboys +##tum +augustus +trash +providers +statute +retreated +balcony +reversed +void +storey +preceded +masses +leap +laughs +neighborhoods +wards +schemes +falcon +santo +battlefield +pad +ronnie +thread +lesbian +venus +##dian +beg +sandstone +daylight +punched +gwen +analog +stroked +wwe +acceptable +measurements +dec +toxic +##kel +adequate +surgical +economist +parameters +varsity +##sberg +quantity +ella +##chy +##rton +countess +generating 
+precision +diamonds +expressway +ga +##ı +1821 +uruguay +talents +galleries +expenses +scanned +colleague +outlets +ryder +lucien +##ila +paramount +##bon +syracuse +dim +fangs +gown +sweep +##sie +toyota +missionaries +websites +##nsis +sentences +adviser +val +trademark +spells +##plane +patience +starter +slim +##borg +toe +incredibly +shoots +elliot +nobility +##wyn +cowboy +endorsed +gardner +tendency +persuaded +organisms +emissions +kazakhstan +amused +boring +chips +themed +##hand +llc +constantinople +chasing +systematic +guatemala +borrowed +erin +carey +##hard +highlands +struggles +1810 +##ifying +##ced +wong +exceptions +develops +enlarged +kindergarten +castro +##ern +##rina +leigh +zombie +juvenile +##most +consul +##nar +sailor +hyde +clarence +intensive +pinned +nasty +useless +jung +clayton +stuffed +exceptional +ix +apostolic +230 +transactions +##dge +exempt +swinging +cove +religions +##ash +shields +dairy +bypass +190 +pursuing +bug +joyce +bombay +chassis +southampton +chat +interact +redesignated +##pen +nascar +pray +salmon +rigid +regained +malaysian +grim +publicity +constituted +capturing +toilet +delegate +purely +tray +drift +loosely +striker +weakened +trinidad +mitch +itv +defines +transmitted +ming +scarlet +nodding +fitzgerald +fu +narrowly +sp +tooth +standings +virtue +##₁ +##wara +##cting +chateau +gloves +lid +##nel +hurting +conservatory +##pel +sinclair +reopened +sympathy +nigerian +strode +advocated +optional +chronic +discharge +##rc +suck +compatible +laurel +stella +shi +fails +wage +dodge +128 +informal +sorts +levi +buddha +villagers +##aka +chronicles +heavier +summoned +gateway +3000 +eleventh +jewelry +translations +accordingly +seas +##ency +fiber +pyramid +cubic +dragging +##ista +caring +##ops +android +contacted +lunar +##dt +kai +lisbon +patted +1826 +sacramento +theft +madagascar +subtropical +disputes +ta +holidays +piper +willow +mare +cane +itunes +newfoundland +benny +companions +dong +raj +observe +roar +charming +plaque +tibetan +fossils +enacted +manning +bubble +tina +tanzania +##eda +##hir +funk +swamp +deputies +cloak +ufc +scenario +par +scratch +metals +anthem +guru +engaging +specially +##boat +dialects +nineteen +cecil +duet +disability +messenger +unofficial +##lies +defunct +eds +moonlight +drainage +surname +puzzle +honda +switching +conservatives +mammals +knox +broadcaster +sidewalk +cope +##ried +benson +princes +peterson +##sal +bedford +sharks +eli +wreck +alberto +gasp +archaeology +lgbt +teaches +securities +madness +compromise +waving +coordination +davidson +visions +leased +possibilities +eighty +jun +fernandez +enthusiasm +assassin +sponsorship +reviewer +kingdoms +estonian +laboratories +##fy +##nal +applies +verb +celebrations +##zzo +rowing +lightweight +sadness +submit +mvp +balanced +dude +##vas +explicitly +metric +magnificent +mound +brett +mohammad +mistakes +irregular +##hing +##ass +sanders +betrayed +shipped +surge +##enburg +reporters +termed +georg +pity +verbal +bulls +abbreviated +enabling +appealed +##are +##atic +sicily +sting +heel +sweetheart +bart +spacecraft +brutal +monarchy +##tter +aberdeen +cameo +diane +##ub +survivor +clyde +##aries +complaint +##makers +clarinet +delicious +chilean +karnataka +coordinates +1818 +panties +##rst +pretending +ar +dramatically +kiev +bella +tends +distances +113 +catalog +launching +instances +telecommunications +portable +lindsay +vatican +##eim +angles +aliens +marker +stint +screens +bolton +##rne +judy +wool +benedict +plasma +europa +spark 
+imaging +filmmaker +swiftly +##een +contributor +##nor +opted +stamps +apologize +financing +butter +gideon +sophisticated +alignment +avery +chemicals +yearly +speculation +prominence +professionally +##ils +immortal +institutional +inception +wrists +identifying +tribunal +derives +gains +##wo +papal +preference +linguistic +vince +operative +brewery +##ont +unemployment +boyd +##ured +##outs +albeit +prophet +1813 +bi +##rr +##face +##rad +quarterly +asteroid +cleaned +radius +temper +##llen +telugu +jerk +viscount +menu +##ote +glimpse +##aya +yacht +hawaiian +baden +##rl +laptop +readily +##gu +monetary +offshore +scots +watches +##yang +##arian +upgrade +needle +xbox +lea +encyclopedia +flank +fingertips +##pus +delight +teachings +confirm +roth +beaches +midway +winters +##iah +teasing +daytime +beverly +gambling +bonnie +##backs +regulated +clement +hermann +tricks +knot +##shing +##uring +##vre +detached +ecological +owed +specialty +byron +inventor +bats +stays +screened +unesco +midland +trim +affection +##ander +##rry +jess +thoroughly +feedback +##uma +chennai +strained +heartbeat +wrapping +overtime +pleaded +##sworth +mon +leisure +oclc +##tate +##ele +feathers +angelo +thirds +nuts +surveys +clever +gill +commentator +##dos +darren +rides +gibraltar +##nc +##mu +dissolution +dedication +shin +meals +saddle +elvis +reds +chaired +taller +appreciation +functioning +niece +favored +advocacy +robbie +criminals +suffolk +yugoslav +passport +constable +congressman +hastings +vera +##rov +consecrated +sparks +ecclesiastical +confined +##ovich +muller +floyd +nora +1822 +paved +1827 +cumberland +ned +saga +spiral +##flow +appreciated +yi +collaborative +treating +similarities +feminine +finishes +##ib +jade +import +##nse +##hot +champagne +mice +securing +celebrities +helsinki +attributes +##gos +cousins +phases +ache +lucia +gandhi +submission +vicar +spear +shine +tasmania +biting +detention +constitute +tighter +seasonal +##gus +terrestrial +matthews +##oka +effectiveness +parody +philharmonic +##onic +1816 +strangers +encoded +consortium +guaranteed +regards +shifts +tortured +collision +supervisor +inform +broader +insight +theaters +armour +emeritus +blink +incorporates +mapping +##50 +##ein +handball +flexible +##nta +substantially +generous +thief +##own +carr +loses +1793 +prose +ucla +romeo +generic +metallic +realization +damages +mk +commissioners +zach +default +##ther +helicopters +lengthy +stems +spa +partnered +spectators +rogue +indication +penalties +teresa +1801 +sen +##tric +dalton +##wich +irving +photographic +##vey +dell +deaf +peters +excluded +unsure +##vable +patterson +crawled +##zio +resided +whipped +latvia +slower +ecole +pipes +employers +maharashtra +comparable +va +textile +pageant +##gel +alphabet +binary +irrigation +chartered +choked +antoine +offs +waking +supplement +##wen +quantities +demolition +regain +locate +urdu +folks +alt +114 +##mc +scary +andreas +whites +##ava +classrooms +mw +aesthetic +publishes +valleys +guides +cubs +johannes +bryant +conventions +affecting +##itt +drain +awesome +isolation +prosecutor +ambitious +apology +captive +downs +atmospheric +lorenzo +aisle +beef +foul +##onia +kidding +composite +disturbed +illusion +natives +##ffer +emi +rockets +riverside +wartime +painters +adolf +melted +##ail +uncertainty +simulation +hawks +progressed +meantime +builder +spray +breach +unhappy +regina +russians +##urg +determining +##tation +tram +1806 +##quin +aging +##12 +1823 +garion +rented +mister +diaz +terminated +clip 
+1817 +depend +nervously +disco +owe +defenders +shiva +notorious +disbelief +shiny +worcester +##gation +##yr +trailing +undertook +islander +belarus +limitations +watershed +fuller +overlooking +utilized +raphael +1819 +synthetic +breakdown +klein +##nate +moaned +memoir +lamb +practicing +##erly +cellular +arrows +exotic +##graphy +witches +117 +charted +rey +hut +hierarchy +subdivision +freshwater +giuseppe +aloud +reyes +qatar +marty +sideways +utterly +sexually +jude +prayers +mccarthy +softball +blend +damien +##gging +##metric +wholly +erupted +lebanese +negro +revenues +tasted +comparative +teamed +transaction +labeled +maori +sovereignty +parkway +trauma +gran +malay +121 +advancement +descendant +2020 +buzz +salvation +inventory +symbolic +##making +antarctica +mps +##gas +##bro +mohammed +myanmar +holt +submarines +tones +##lman +locker +patriarch +bangkok +emerson +remarks +predators +kin +afghan +confession +norwich +rental +emerge +advantages +##zel +rca +##hold +shortened +storms +aidan +##matic +autonomy +compliance +##quet +dudley +atp +##osis +1803 +motto +documentation +summary +professors +spectacular +christina +archdiocese +flashing +innocence +remake +##dell +psychic +reef +scare +employ +rs +sticks +meg +gus +leans +##ude +accompany +bergen +tomas +##iko +doom +wages +pools +##nch +##bes +breasts +scholarly +alison +outline +brittany +breakthrough +willis +realistic +##cut +##boro +competitor +##stan +pike +picnic +icon +designing +commercials +washing +villain +skiing +micro +costumes +auburn +halted +executives +##hat +logistics +cycles +vowel +applicable +barrett +exclaimed +eurovision +eternity +ramon +##umi +##lls +modifications +sweeping +disgust +##uck +torch +aviv +ensuring +rude +dusty +sonic +donovan +outskirts +cu +pathway +##band +##gun +##lines +disciplines +acids +cadet +paired +##40 +sketches +##sive +marriages +##⁺ +folding +peers +slovak +implies +admired +##beck +1880s +leopold +instinct +attained +weston +megan +horace +##ination +dorsal +ingredients +evolutionary +##its +complications +deity +lethal +brushing +levy +deserted +institutes +posthumously +delivering +telescope +coronation +motivated +rapids +luc +flicked +pays +volcano +tanner +weighed +##nica +crowds +frankie +gifted +addressing +granddaughter +winding +##rna +constantine +gomez +##front +landscapes +rudolf +anthropology +slate +werewolf +##lio +astronomy +circa +rouge +dreaming +sack +knelt +drowned +naomi +prolific +tracked +freezing +herb +##dium +agony +randall +twisting +wendy +deposit +touches +vein +wheeler +##bbled +##bor +batted +retaining +tire +presently +compare +specification +daemon +nigel +##grave +merry +recommendation +czechoslovakia +sandra +ng +roma +##sts +lambert +inheritance +sheikh +winchester +cries +examining +##yle +comeback +cuisine +nave +##iv +ko +retrieve +tomatoes +barker +polished +defining +irene +lantern +personalities +begging +tract +swore +1809 +175 +##gic +omaha +brotherhood +##rley +haiti +##ots +exeter +##ete +##zia +steele +dumb +pearson +210 +surveyed +elisabeth +trends +##ef +fritz +##rf +premium +bugs +fraction +calmly +viking +##birds +tug +inserted +unusually +##ield +confronted +distress +crashing +brent +turks +resign +##olo +cambodia +gabe +sauce +##kal +evelyn +116 +extant +clusters +quarry +teenagers +luna +##lers +##ister +affiliation +drill +##ashi +panthers +scenic +libya +anita +strengthen +inscriptions +##cated +lace +sued +judith +riots +##uted +mint +##eta +preparations +midst +dub +challenger +##vich +mock +cf +displaced 
+wicket +breaths +enables +schmidt +analyst +##lum +ag +highlight +automotive +axe +josef +newark +sufficiently +resembles +50th +##pal +flushed +mum +traits +##ante +commodore +incomplete +warming +titular +ceremonial +ethical +118 +celebrating +eighteenth +cao +lima +medalist +mobility +strips +snakes +##city +miniature +zagreb +barton +escapes +umbrella +automated +doubted +differs +cooled +georgetown +dresden +cooked +fade +wyatt +rna +jacobs +carlton +abundant +stereo +boost +madras +inning +##hia +spur +ip +malayalam +begged +osaka +groan +escaping +charging +dose +vista +##aj +bud +papa +communists +advocates +edged +tri +##cent +resemble +peaking +necklace +fried +montenegro +saxony +goose +glances +stuttgart +curator +recruit +grocery +sympathetic +##tting +##fort +127 +lotus +randolph +ancestor +##rand +succeeding +jupiter +1798 +macedonian +##heads +hiking +1808 +handing +fischer +##itive +garbage +node +##pies +prone +singular +papua +inclined +attractions +italia +pouring +motioned +grandma +garnered +jacksonville +corp +ego +ringing +aluminum +##hausen +ordering +##foot +drawer +traders +synagogue +##play +##kawa +resistant +wandering +fragile +fiona +teased +var +hardcore +soaked +jubilee +decisive +exposition +mercer +poster +valencia +hale +kuwait +1811 +##ises +##wr +##eed +tavern +gamma +122 +johan +##uer +airways +amino +gil +##ury +vocational +domains +torres +##sp +generator +folklore +outcomes +##keeper +canberra +shooter +fl +beams +confrontation +##lling +##gram +feb +aligned +forestry +pipeline +jax +motorway +conception +decay +##tos +coffin +##cott +stalin +1805 +escorted +minded +##nam +sitcom +purchasing +twilight +veronica +additions +passive +tensions +straw +123 +frequencies +1804 +refugee +cultivation +##iate +christie +clary +bulletin +crept +disposal +##rich +##zong +processor +crescent +##rol +bmw +emphasized +whale +nazis +aurora +##eng +dwelling +hauled +sponsors +toledo +mega +ideology +theatres +tessa +cerambycidae +saves +turtle +cone +suspects +kara +rusty +yelling +greeks +mozart +shades +cocked +participant +##tro +shire +spit +freeze +necessity +##cos +inmates +nielsen +councillors +loaned +uncommon +omar +peasants +botanical +offspring +daniels +formations +jokes +1794 +pioneers +sigma +licensing +##sus +wheelchair +polite +1807 +liquor +pratt +trustee +##uta +forewings +balloon +##zz +kilometre +camping +explicit +casually +shawn +foolish +teammates +nm +hassan +carrie +judged +satisfy +vanessa +knives +selective +cnn +flowed +##lice +eclipse +stressed +eliza +mathematician +cease +cultivated +##roy +commissions +browns +##ania +destroyers +sheridan +meadow +##rius +minerals +##cial +downstream +clash +gram +memoirs +ventures +baha +seymour +archie +midlands +edith +fare +flynn +invite +canceled +tiles +stabbed +boulder +incorporate +amended +camden +facial +mollusk +unreleased +descriptions +yoga +grabs +550 +raises +ramp +shiver +##rose +coined +pioneering +tunes +qing +warwick +tops +119 +melanie +giles +##rous +wandered +##inal +annexed +nov +30th +unnamed +##ished +organizational +airplane +normandy +stoke +whistle +blessing +violations +chased +holders +shotgun +##ctic +outlet +reactor +##vik +tires +tearing +shores +fortified +mascot +constituencies +nc +columnist +productive +tibet +##rta +lineage +hooked +oct +tapes +judging +cody +##gger +hansen +kashmir +triggered +##eva +solved +cliffs +##tree +resisted +anatomy +protesters +transparent +implied +##iga +injection +mattress +excluding +##mbo +defenses +helpless +devotion +##elli 
+growl +liberals +weber +phenomena +atoms +plug +##iff +mortality +apprentice +howe +convincing +aaa +swimmer +barber +leone +promptly +sodium +def +nowadays +arise +##oning +gloucester +corrected +dignity +norm +erie +##ders +elders +evacuated +sylvia +compression +##yar +hartford +pose +backpack +reasoning +accepts +24th +wipe +millimetres +marcel +##oda +dodgers +albion +1790 +overwhelmed +aerospace +oaks +1795 +showcase +acknowledge +recovering +nolan +ashe +hurts +geology +fashioned +disappearance +farewell +swollen +shrug +marquis +wimbledon +124 +rue +1792 +commemorate +reduces +experiencing +inevitable +calcutta +intel +##court +murderer +sticking +fisheries +imagery +bloom +280 +brake +##inus +gustav +hesitation +memorable +po +viral +beans +accidents +tunisia +antenna +spilled +consort +treatments +aye +perimeter +##gard +donation +hostage +migrated +banker +addiction +apex +lil +trout +##ously +conscience +##nova +rams +sands +genome +passionate +troubles +##lets +##set +amid +##ibility +##ret +higgins +exceed +vikings +##vie +payne +##zan +muscular +##ste +defendant +sucking +##wal +ibrahim +fuselage +claudia +vfl +europeans +snails +interval +##garh +preparatory +statewide +tasked +lacrosse +viktor +##lation +angola +##hra +flint +implications +employs +teens +patrons +stall +weekends +barriers +scrambled +nucleus +tehran +jenna +parsons +lifelong +robots +displacement +5000 +##bles +precipitation +##gt +knuckles +clutched +1802 +marrying +ecology +marx +accusations +declare +scars +kolkata +mat +meadows +bermuda +skeleton +finalists +vintage +crawl +coordinate +affects +subjected +orchestral +mistaken +##tc +mirrors +dipped +relied +260 +arches +candle +##nick +incorporating +wildly +fond +basilica +owl +fringe +rituals +whispering +stirred +feud +tertiary +slick +goat +honorable +whereby +skip +ricardo +stripes +parachute +adjoining +submerged +synthesizer +##gren +intend +positively +ninety +phi +beaver +partition +fellows +alexis +prohibition +carlisle +bizarre +fraternity +##bre +doubts +icy +cbc +aquatic +sneak +sonny +combines +airports +crude +supervised +spatial +merge +alfonso +##bic +corrupt +scan +undergo +##ams +disabilities +colombian +comparing +dolphins +perkins +##lish +reprinted +unanimous +bounced +hairs +underworld +midwest +semester +bucket +paperback +miniseries +coventry +demise +##leigh +demonstrations +sensor +rotating +yan +##hler +arrange +soils +##idge +hyderabad +labs +##dr +brakes +grandchildren +##nde +negotiated +rover +ferrari +continuation +directorate +augusta +stevenson +counterpart +gore +##rda +nursery +rican +ave +collectively +broadly +pastoral +repertoire +asserted +discovering +nordic +styled +fiba +cunningham +harley +middlesex +survives +tumor +tempo +zack +aiming +lok +urgent +##rade +##nto +devils +##ement +contractor +turin +##wl +##ool +bliss +repaired +simmons +moan +astronomical +cr +negotiate +lyric +1890s +lara +bred +clad +angus +pbs +##ience +engineered +posed +##lk +hernandez +possessions +elbows +psychiatric +strokes +confluence +electorate +lifts +campuses +lava +alps +##ep +##ution +##date +physicist +woody +##page +##ographic +##itis +juliet +reformation +sparhawk +320 +complement +suppressed +jewel +##½ +floated +##kas +continuity +sadly +##ische +inability +melting +scanning +paula +flour +judaism +safer +vague +##lm +solving +curb +##stown +financially +gable +bees +expired +miserable +cassidy +dominion +1789 +cupped +145 +robbery +facto +amos +warden +resume +tallest +marvin +ing +pounded +usd +declaring +gasoline 
+##aux +darkened +270 +650 +sophomore +##mere +erection +gossip +televised +risen +dial +##eu +pillars +##link +passages +profound +##tina +arabian +ashton +silicon +nail +##ead +##lated +##wer +##hardt +fleming +firearms +ducked +circuits +blows +waterloo +titans +##lina +atom +fireplace +cheshire +financed +activation +algorithms +##zzi +constituent +catcher +cherokee +partnerships +sexuality +platoon +tragic +vivian +guarded +whiskey +meditation +poetic +##late +##nga +##ake +porto +listeners +dominance +kendra +mona +chandler +factions +22nd +salisbury +attitudes +derivative +##ido +##haus +intake +paced +javier +illustrator +barrels +bias +cockpit +burnett +dreamed +ensuing +##anda +receptors +someday +hawkins +mattered +##lal +slavic +1799 +jesuit +cameroon +wasted +tai +wax +lowering +victorious +freaking +outright +hancock +librarian +sensing +bald +calcium +myers +tablet +announcing +barack +shipyard +pharmaceutical +##uan +greenwich +flush +medley +patches +wolfgang +pt +speeches +acquiring +exams +nikolai +##gg +hayden +kannada +##type +reilly +##pt +waitress +abdomen +devastated +capped +pseudonym +pharmacy +fulfill +paraguay +1796 +clicked +##trom +archipelago +syndicated +##hman +lumber +orgasm +rejection +clifford +lorraine +advent +mafia +rodney +brock +##ght +##used +##elia +cassette +chamberlain +despair +mongolia +sensors +developmental +upstream +##eg +##alis +spanning +165 +trombone +basque +seeded +interred +renewable +rhys +leapt +revision +molecule +##ages +chord +vicious +nord +shivered +23rd +arlington +debts +corpus +sunrise +bays +blackburn +centimetres +##uded +shuddered +gm +strangely +gripping +cartoons +isabelle +orbital +##ppa +seals +proving +##lton +refusal +strengthened +bust +assisting +baghdad +batsman +portrayal +mara +pushes +spears +og +##cock +reside +nathaniel +brennan +1776 +confirmation +caucus +##worthy +markings +yemen +nobles +ku +lazy +viewer +catalan +encompasses +sawyer +##fall +sparked +substances +patents +braves +arranger +evacuation +sergio +persuade +dover +tolerance +penguin +cum +jockey +insufficient +townships +occupying +declining +plural +processed +projection +puppet +flanders +introduces +liability +##yon +gymnastics +antwerp +taipei +hobart +candles +jeep +wes +observers +126 +chaplain +bundle +glorious +##hine +hazel +flung +sol +excavations +dumped +stares +sh +bangalore +triangular +icelandic +intervals +expressing +turbine +##vers +songwriting +crafts +##igo +jasmine +ditch +rite +##ways +entertaining +comply +sorrow +wrestlers +basel +emirates +marian +rivera +helpful +##some +caution +downward +networking +##atory +##tered +darted +genocide +emergence +replies +specializing +spokesman +convenient +unlocked +fading +augustine +concentrations +resemblance +elijah +investigator +andhra +##uda +promotes +bean +##rrell +fleeing +wan +simone +announcer +##ame +##bby +lydia +weaver +132 +residency +modification +##fest +stretches +##ast +alternatively +nat +lowe +lacks +##ented +pam +tile +concealed +inferior +abdullah +residences +tissues +vengeance +##ided +moisture +peculiar +groove +zip +bologna +jennings +ninja +oversaw +zombies +pumping +batch +livingston +emerald +installations +1797 +peel +nitrogen +rama +##fying +##star +schooling +strands +responding +werner +##ost +lime +casa +accurately +targeting +##rod +underway +##uru +hemisphere +lester +##yard +occupies +2d +griffith +angrily +reorganized +##owing +courtney +deposited +##dd +##30 +estadio +##ifies +dunn +exiled +##ying +checks +##combe +##о +##fly +successes 
+unexpectedly +blu +assessed +##flower +##ه +observing +sacked +spiders +kn +##tail +mu +nodes +prosperity +audrey +divisional +155 +broncos +tangled +adjust +feeds +erosion +paolo +surf +directory +snatched +humid +admiralty +screwed +gt +reddish +##nese +modules +trench +lamps +bind +leah +bucks +competes +##nz +##form +transcription +##uc +isles +violently +clutching +pga +cyclist +inflation +flats +ragged +unnecessary +##hian +stubborn +coordinated +harriet +baba +disqualified +330 +insect +wolfe +##fies +reinforcements +rocked +duel +winked +embraced +bricks +##raj +hiatus +defeats +pending +brightly +jealousy +##xton +##hm +##uki +lena +gdp +colorful +##dley +stein +kidney +##shu +underwear +wanderers +##haw +##icus +guardians +m³ +roared +habits +##wise +permits +gp +uranium +punished +disguise +bundesliga +elise +dundee +erotic +partisan +pi +collectors +float +individually +rendering +behavioral +bucharest +ser +hare +valerie +corporal +nutrition +proportional +##isa +immense +##kis +pavement +##zie +##eld +sutherland +crouched +1775 +##lp +suzuki +trades +endurance +operas +crosby +prayed +priory +rory +socially +##urn +gujarat +##pu +walton +cube +pasha +privilege +lennon +floods +thorne +waterfall +nipple +scouting +approve +##lov +minorities +voter +dwight +extensions +assure +ballroom +slap +dripping +privileges +rejoined +confessed +demonstrating +patriotic +yell +investor +##uth +pagan +slumped +squares +##cle +##kins +confront +bert +embarrassment +##aid +aston +urging +sweater +starr +yuri +brains +williamson +commuter +mortar +structured +selfish +exports +##jon +cds +##him +unfinished +##rre +mortgage +destinations +##nagar +canoe +solitary +buchanan +delays +magistrate +fk +##pling +motivation +##lier +##vier +recruiting +assess +##mouth +malik +antique +1791 +pius +rahman +reich +tub +zhou +smashed +airs +galway +xii +conditioning +honduras +discharged +dexter +##pf +lionel +129 +debates +lemon +tiffany +volunteered +dom +dioxide +procession +devi +sic +tremendous +advertisements +colts +transferring +verdict +hanover +decommissioned +utter +relate +pac +racism +##top +beacon +limp +similarity +terra +occurrence +ant +##how +becky +capt +updates +armament +richie +pal +##graph +halloween +mayo +##ssen +##bone +cara +serena +fcc +dolls +obligations +##dling +violated +lafayette +jakarta +exploitation +##ime +infamous +iconic +##lah +##park +kitty +moody +reginald +dread +spill +crystals +olivier +modeled +bluff +equilibrium +separating +notices +ordnance +extinction +onset +cosmic +attachment +sammy +expose +privy +anchored +##bil +abbott +admits +bending +baritone +emmanuel +policeman +vaughan +winged +climax +dresses +denny +polytechnic +mohamed +burmese +authentic +nikki +genetics +grandparents +homestead +gaza +postponed +metacritic +una +##sby +##bat +unstable +dissertation +##rial +##cian +curls +obscure +uncovered +bronx +praying +disappearing +##hoe +prehistoric +coke +turret +mutations +nonprofit +pits +monaco +##ي +##usion +prominently +dispatched +podium +##mir +uci +##uation +133 +fortifications +birthplace +kendall +##lby +##oll +preacher +rack +goodman +##rman +persistent +##ott +countless +jaime +recorder +lexington +persecution +jumps +renewal +wagons +##11 +crushing +##holder +decorations +##lake +abundance +wrath +laundry +£1 +garde +##rp +jeanne +beetles +peasant +##sl +splitting +caste +sergei +##rer +##ema +scripts +##ively +rub +satellites +##vor +inscribed +verlag +scrapped +gale +packages +chick +potato +slogan +kathleen +arabs +##culture 
+counterparts +reminiscent +choral +##tead +rand +retains +bushes +dane +accomplish +courtesy +closes +##oth +slaughter +hague +krakow +lawson +tailed +elias +ginger +##ttes +canopy +betrayal +rebuilding +turf +##hof +frowning +allegiance +brigades +kicks +rebuild +polls +alias +nationalism +td +rowan +audition +bowie +fortunately +recognizes +harp +dillon +horrified +##oro +renault +##tics +ropes +##α +presumed +rewarded +infrared +wiping +accelerated +illustration +##rid +presses +practitioners +badminton +##iard +detained +##tera +recognizing +relates +misery +##sies +##tly +reproduction +piercing +potatoes +thornton +esther +manners +hbo +##aan +ours +bullshit +ernie +perennial +sensitivity +illuminated +rupert +##jin +##iss +##ear +rfc +nassau +##dock +staggered +socialism +##haven +appointments +nonsense +prestige +sharma +haul +##tical +solidarity +gps +##ook +##rata +igor +pedestrian +##uit +baxter +tenants +wires +medication +unlimited +guiding +impacts +diabetes +##rama +sasha +pas +clive +extraction +131 +continually +constraints +##bilities +sonata +hunted +sixteenth +chu +planting +quote +mayer +pretended +abs +spat +##hua +ceramic +##cci +curtains +pigs +pitching +##dad +latvian +sore +dayton +##sted +##qi +patrols +slice +playground +##nted +shone +stool +apparatus +inadequate +mates +treason +##ija +desires +##liga +##croft +somalia +laurent +mir +leonardo +oracle +grape +obliged +chevrolet +thirteenth +stunning +enthusiastic +##ede +accounted +concludes +currents +basil +##kovic +drought +##rica +mai +##aire +shove +posting +##shed +pilgrimage +humorous +packing +fry +pencil +wines +smells +144 +marilyn +aching +newest +clung +bon +neighbours +sanctioned +##pie +mug +##stock +drowning +##mma +hydraulic +##vil +hiring +reminder +lilly +investigators +##ncies +sour +##eous +compulsory +packet +##rion +##graphic +##elle +cannes +##inate +depressed +##rit +heroic +importantly +theresa +##tled +conway +saturn +marginal +rae +##xia +corresponds +royce +pact +jasper +explosives +packaging +aluminium +##ttered +denotes +rhythmic +spans +assignments +hereditary +outlined +originating +sundays +lad +reissued +greeting +beatrice +##dic +pillar +marcos +plots +handbook +alcoholic +judiciary +avant +slides +extract +masculine +blur +##eum +##force +homage +trembled +owens +hymn +trey +omega +signaling +socks +accumulated +reacted +attic +theo +lining +angie +distraction +primera +talbot +##key +1200 +ti +creativity +billed +##hey +deacon +eduardo +identifies +proposition +dizzy +gunner +hogan +##yam +##pping +##hol +ja +##chan +jensen +reconstructed +##berger +clearance +darius +##nier +abe +harlem +plea +dei +circled +emotionally +notation +fascist +neville +exceeded +upwards +viable +ducks +##fo +workforce +racer +limiting +shri +##lson +possesses +1600 +kerr +moths +devastating +laden +disturbing +locking +##cture +gal +fearing +accreditation +flavor +aide +1870s +mountainous +##baum +melt +##ures +motel +texture +servers +soda +##mb +herd +##nium +erect +puzzled +hum +peggy +examinations +gould +testified +geoff +ren +devised +sacks +##law +denial +posters +grunted +cesar +tutor +ec +gerry +offerings +byrne +falcons +combinations +ct +incoming +pardon +rocking +26th +avengers +flared +mankind +seller +uttar +loch +nadia +stroking +exposing +##hd +fertile +ancestral +instituted +##has +noises +prophecy +taxation +eminent +vivid +pol +##bol +dart +indirect +multimedia +notebook +upside +displaying +adrenaline +referenced +geometric +##iving +progression +##ddy +blunt +announce +##far 
+implementing +##lav +aggression +liaison +cooler +cares +headache +plantations +gorge +dots +impulse +thickness +ashamed +averaging +kathy +obligation +precursor +137 +fowler +symmetry +thee +225 +hears +##rai +undergoing +ads +butcher +bowler +##lip +cigarettes +subscription +goodness +##ically +browne +##hos +##tech +kyoto +donor +##erty +damaging +friction +drifting +expeditions +hardened +prostitution +152 +fauna +blankets +claw +tossing +snarled +butterflies +recruits +investigative +coated +healed +138 +communal +hai +xiii +academics +boone +psychologist +restless +lahore +stephens +mba +brendan +foreigners +printer +##pc +ached +explode +27th +deed +scratched +dared +##pole +cardiac +1780 +okinawa +proto +commando +compelled +oddly +electrons +##base +replica +thanksgiving +##rist +sheila +deliberate +stafford +tidal +representations +hercules +ou +##path +##iated +kidnapping +lenses +##tling +deficit +samoa +mouths +consuming +computational +maze +granting +smirk +razor +fixture +ideals +inviting +aiden +nominal +##vs +issuing +julio +pitt +ramsey +docks +##oss +exhaust +##owed +bavarian +draped +anterior +mating +ethiopian +explores +noticing +##nton +discarded +convenience +hoffman +endowment +beasts +cartridge +mormon +paternal +probe +sleeves +interfere +lump +deadline +##rail +jenks +bulldogs +scrap +alternating +justified +reproductive +nam +seize +descending +secretariat +kirby +coupe +grouped +smash +panther +sedan +tapping +##18 +lola +cheer +germanic +unfortunate +##eter +unrelated +##fan +subordinate +##sdale +suzanne +advertisement +##ility +horsepower +##lda +cautiously +discourse +luigi +##mans +##fields +noun +prevalent +mao +schneider +everett +surround +governorate +kira +##avia +westward +##take +misty +rails +sustainability +134 +unused +##rating +packs +toast +unwilling +regulate +thy +suffrage +nile +awe +assam +definitions +travelers +affordable +##rb +conferred +sells +undefeated +beneficial +torso +basal +repeating +remixes +##pass +bahrain +cables +fang +##itated +excavated +numbering +statutory +##rey +deluxe +##lian +forested +ramirez +derbyshire +zeus +slamming +transfers +astronomer +banana +lottery +berg +histories +bamboo +##uchi +resurrection +posterior +bowls +vaguely +##thi +thou +preserving +tensed +offence +##inas +meyrick +callum +ridden +watt +langdon +tying +lowland +snorted +daring +truman +##hale +##girl +aura +overly +filing +weighing +goa +infections +philanthropist +saunders +eponymous +##owski +latitude +perspectives +reviewing +mets +commandant +radial +##kha +flashlight +reliability +koch +vowels +amazed +ada +elaine +supper +##rth +##encies +predator +debated +soviets +cola +##boards +##nah +compartment +crooked +arbitrary +fourteenth +##ctive +havana +majors +steelers +clips +profitable +ambush +exited +packers +##tile +nude +cracks +fungi +##е +limb +trousers +josie +shelby +tens +frederic +##ος +definite +smoothly +constellation +insult +baton +discs +lingering +##nco +conclusions +lent +staging +becker +grandpa +shaky +##tron +einstein +obstacles +sk +adverse +elle +economically +##moto +mccartney +thor +dismissal +motions +readings +nostrils +treatise +##pace +squeezing +evidently +prolonged +1783 +venezuelan +je +marguerite +beirut +takeover +shareholders +##vent +denise +digit +airplay +norse +##bbling +imaginary +pills +hubert +blaze +vacated +eliminating +##ello +vine +mansfield +##tty +retrospective +barrow +borne +clutch +bail +forensic +weaving +##nett +##witz +desktop +citadel +promotions +worrying +dorset +ieee 
+subdivided +##iating +manned +expeditionary +pickup +synod +chuckle +185 +barney +##rz +##ffin +functionality +karachi +litigation +meanings +uc +lick +turbo +anders +##ffed +execute +curl +oppose +ankles +typhoon +##د +##ache +##asia +linguistics +compassion +pressures +grazing +perfection +##iting +immunity +monopoly +muddy +backgrounds +136 +namibia +francesca +monitors +attracting +stunt +tuition +##ии +vegetable +##mates +##quent +mgm +jen +complexes +forts +##ond +cellar +bites +seventeenth +royals +flemish +failures +mast +charities +##cular +peruvian +capitals +macmillan +ipswich +outward +frigate +postgraduate +folds +employing +##ouse +concurrently +fiery +##tai +contingent +nightmares +monumental +nicaragua +##kowski +lizard +mal +fielding +gig +reject +##pad +harding +##ipe +coastline +##cin +##nos +beethoven +humphrey +innovations +##tam +##nge +norris +doris +solicitor +huang +obey +141 +##lc +niagara +##tton +shelves +aug +bourbon +curry +nightclub +specifications +hilton +##ndo +centennial +dispersed +worm +neglected +briggs +sm +font +kuala +uneasy +plc +##nstein +##bound +##aking +##burgh +awaiting +pronunciation +##bbed +##quest +eh +optimal +zhu +raped +greens +presided +brenda +worries +##life +venetian +marxist +turnout +##lius +refined +braced +sins +grasped +sunderland +nickel +speculated +lowell +cyrillic +communism +fundraising +resembling +colonists +mutant +freddie +usc +##mos +gratitude +##run +mural +##lous +chemist +wi +reminds +28th +steals +tess +pietro +##ingen +promoter +ri +microphone +honoured +rai +sant +##qui +feather +##nson +burlington +kurdish +terrorists +deborah +sickness +##wed +##eet +hazard +irritated +desperation +veil +clarity +##rik +jewels +xv +##gged +##ows +##cup +berkshire +unfair +mysteries +orchid +winced +exhaustion +renovations +stranded +obe +infinity +##nies +adapt +redevelopment +thanked +registry +olga +domingo +noir +tudor +ole +##atus +commenting +behaviors +##ais +crisp +pauline +probable +stirling +wigan +##bian +paralympics +panting +surpassed +##rew +luca +barred +pony +famed +##sters +cassandra +waiter +carolyn +exported +##orted +andres +destructive +deeds +jonah +castles +vacancy +suv +##glass +1788 +orchard +yep +famine +belarusian +sprang +##forth +skinny +##mis +administrators +rotterdam +zambia +zhao +boiler +discoveries +##ride +##physics +lucius +disappointing +outreach +spoon +##frame +qualifications +unanimously +enjoys +regency +##iidae +stade +realism +veterinary +rodgers +dump +alain +chestnut +castile +censorship +rumble +gibbs +##itor +communion +reggae +inactivated +logs +loads +##houses +homosexual +##iano +ale +informs +##cas +phrases +plaster +linebacker +ambrose +kaiser +fascinated +850 +limerick +recruitment +forge +mastered +##nding +leinster +rooted +threaten +##strom +borneo +##hes +suggestions +scholarships +propeller +documentaries +patronage +coats +constructing +invest +neurons +comet +entirety +shouts +identities +annoying +unchanged +wary +##antly +##ogy +neat +oversight +##kos +phillies +replay +constance +##kka +incarnation +humble +skies +minus +##acy +smithsonian +##chel +guerrilla +jar +cadets +##plate +surplus +audit +##aru +cracking +joanna +louisa +pacing +##lights +intentionally +##iri +diner +nwa +imprint +australians +tong +unprecedented +bunker +naive +specialists +ark +nichols +railing +leaked +pedal +##uka +shrub +longing +roofs +v8 +captains +neural +tuned +##ntal +##jet +emission +medina +frantic +codex +definitive +sid +abolition +intensified +stocks +enrique +sustain +genoa 
+oxide +##written +clues +cha +##gers +tributaries +fragment +venom +##rity +##ente +##sca +muffled +vain +sire +laos +##ingly +##hana +hastily +snapping +surfaced +sentiment +motive +##oft +contests +approximate +mesa +luckily +dinosaur +exchanges +propelled +accord +bourne +relieve +tow +masks +offended +##ues +cynthia +##mmer +rains +bartender +zinc +reviewers +lois +##sai +legged +arrogant +rafe +rosie +comprise +handicap +blockade +inlet +lagoon +copied +drilling +shelley +petals +##inian +mandarin +obsolete +##inated +onward +arguably +productivity +cindy +praising +seldom +busch +discusses +raleigh +shortage +ranged +stanton +encouragement +firstly +conceded +overs +temporal +##uke +cbe +##bos +woo +certainty +pumps +##pton +stalked +##uli +lizzie +periodic +thieves +weaker +##night +gases +shoving +chooses +wc +##chemical +prompting +weights +##kill +robust +flanked +sticky +hu +tuberculosis +##eb +##eal +christchurch +resembled +wallet +reese +inappropriate +pictured +distract +fixing +fiddle +giggled +burger +heirs +hairy +mechanic +torque +apache +obsessed +chiefly +cheng +logging +##tag +extracted +meaningful +numb +##vsky +gloucestershire +reminding +##bay +unite +##lit +breeds +diminished +clown +glove +1860s +##ن +##ug +archibald +focal +freelance +sliced +depiction +##yk +organism +switches +sights +stray +crawling +##ril +lever +leningrad +interpretations +loops +anytime +reel +alicia +delighted +##ech +inhaled +xiv +suitcase +bernie +vega +licenses +northampton +exclusion +induction +monasteries +racecourse +homosexuality +##right +##sfield +##rky +dimitri +michele +alternatives +ions +commentators +genuinely +objected +pork +hospitality +fencing +stephan +warships +peripheral +wit +drunken +wrinkled +quentin +spends +departing +chung +numerical +spokesperson +##zone +johannesburg +caliber +killers +##udge +assumes +neatly +demographic +abigail +bloc +##vel +mounting +##lain +bentley +slightest +xu +recipients +##jk +merlin +##writer +seniors +prisons +blinking +hindwings +flickered +kappa +##hel +80s +strengthening +appealing +brewing +gypsy +mali +lashes +hulk +unpleasant +harassment +bio +treaties +predict +instrumentation +pulp +troupe +boiling +mantle +##ffe +ins +##vn +dividing +handles +verbs +##onal +coconut +senegal +340 +thorough +gum +momentarily +##sto +cocaine +panicked +destined +##turing +teatro +denying +weary +captained +mans +##hawks +##code +wakefield +bollywood +thankfully +##16 +cyril +##wu +amendments +##bahn +consultation +stud +reflections +kindness +1787 +internally +##ovo +tex +mosaic +distribute +paddy +seeming +143 +##hic +piers +##15 +##mura +##verse +popularly +winger +kang +sentinel +mccoy +##anza +covenant +##bag +verge +fireworks +suppress +thrilled +dominate +##jar +swansea +##60 +142 +reconciliation +##ndi +stiffened +cue +dorian +##uf +damascus +amor +ida +foremost +##aga +porsche +unseen +dir +##had +##azi +stony +lexi +melodies +##nko +angular +integer +podcast +ants +inherent +jaws +justify +persona +##olved +josephine +##nr +##ressed +customary +flashes +gala +cyrus +glaring +backyard +ariel +physiology +greenland +html +stir +avon +atletico +finch +methodology +ked +##lent +mas +catholicism +townsend +branding +quincy +fits +containers +1777 +ashore +aragon +##19 +forearm +poisoning +##sd +adopting +conquer +grinding +amnesty +keller +finances +evaluate +forged +lankan +instincts +##uto +guam +bosnian +photographed +workplace +desirable +protector +##dog +allocation +intently +encourages +willy +##sten +bodyguard +electro +brighter 
+##ν +bihar +##chev +lasts +opener +amphibious +sal +verde +arte +##cope +captivity +vocabulary +yields +##tted +agreeing +desmond +pioneered +##chus +strap +campaigned +railroads +##ович +emblem +##dre +stormed +501 +##ulous +marijuana +northumberland +##gn +##nath +bowen +landmarks +beaumont +##qua +danube +##bler +attorneys +th +ge +flyers +critique +villains +cass +mutation +acc +##0s +colombo +mckay +motif +sampling +concluding +syndicate +##rell +neon +stables +ds +warnings +clint +mourning +wilkinson +##tated +merrill +leopard +evenings +exhaled +emil +sonia +ezra +discrete +stove +farrell +fifteenth +prescribed +superhero +##rier +worms +helm +wren +##duction +##hc +expo +##rator +hq +unfamiliar +antony +prevents +acceleration +fiercely +mari +painfully +calculations +cheaper +ign +clifton +irvine +davenport +mozambique +##np +pierced +##evich +wonders +##wig +##cate +##iling +crusade +ware +##uel +enzymes +reasonably +mls +##coe +mater +ambition +bunny +eliot +kernel +##fin +asphalt +headmaster +torah +aden +lush +pins +waived +##care +##yas +joao +substrate +enforce +##grad +##ules +alvarez +selections +epidemic +tempted +##bit +bremen +translates +ensured +waterfront +29th +forrest +manny +malone +kramer +reigning +cookies +simpler +absorption +205 +engraved +##ffy +evaluated +1778 +haze +146 +comforting +crossover +##abe +thorn +##rift +##imo +##pop +suppression +fatigue +cutter +##tr +201 +wurttemberg +##orf +enforced +hovering +proprietary +gb +samurai +syllable +ascent +lacey +tick +lars +tractor +merchandise +rep +bouncing +defendants +##yre +huntington +##ground +##oko +standardized +##hor +##hima +assassinated +nu +predecessors +rainy +liar +assurance +lyrical +##uga +secondly +flattened +ios +parameter +undercover +##mity +bordeaux +punish +ridges +markers +exodus +inactive +hesitate +debbie +nyc +pledge +savoy +nagar +offset +organist +##tium +hesse +marin +converting +##iver +diagram +propulsion +pu +validity +reverted +supportive +##dc +ministries +clans +responds +proclamation +##inae +##ø +##rea +ein +pleading +patriot +sf +birch +islanders +strauss +hates +##dh +brandenburg +concession +rd +##ob +1900s +killings +textbook +antiquity +cinematography +wharf +embarrassing +setup +creed +farmland +inequality +centred +signatures +fallon +370 +##ingham +##uts +ceylon +gazing +directive +laurie +##tern +globally +##uated +##dent +allah +excavation +threads +##cross +148 +frantically +icc +utilize +determines +respiratory +thoughtful +receptions +##dicate +merging +chandra +seine +147 +builders +builds +diagnostic +dev +visibility +goddamn +analyses +dhaka +cho +proves +chancel +concurrent +curiously +canadians +pumped +restoring +1850s +turtles +jaguar +sinister +spinal +traction +declan +vows +1784 +glowed +capitalism +swirling +install +universidad +##lder +##oat +soloist +##genic +##oor +coincidence +beginnings +nissan +dip +resorts +caucasus +combustion +infectious +##eno +pigeon +serpent +##itating +conclude +masked +salad +jew +##gr +surreal +toni +##wc +harmonica +151 +##gins +##etic +##coat +fishermen +intending +bravery +##wave +klaus +titan +wembley +taiwanese +ransom +40th +incorrect +hussein +eyelids +jp +cooke +dramas +utilities +##etta +##print +eisenhower +principally +granada +lana +##rak +openings +concord +##bl +bethany +connie +morality +sega +##mons +##nard +earnings +##kara +##cine +wii +communes +##rel +coma +composing +softened +severed +grapes +##17 +nguyen +analyzed +warlord +hubbard +heavenly +behave +slovenian +##hit +##ony +hailed +filmmakers 
+trance +caldwell +skye +unrest +coward +likelihood +##aging +bern +sci +taliban +honolulu +propose +##wang +1700 +browser +imagining +cobra +contributes +dukes +instinctively +conan +violinist +##ores +accessories +gradual +##amp +quotes +sioux +##dating +undertake +intercepted +sparkling +compressed +139 +fungus +tombs +haley +imposing +rests +degradation +lincolnshire +retailers +wetlands +tulsa +distributor +dungeon +nun +greenhouse +convey +atlantis +aft +exits +oman +dresser +lyons +##sti +joking +eddy +judgement +omitted +digits +##cts +##game +juniors +##rae +cents +stricken +une +##ngo +wizards +weir +breton +nan +technician +fibers +liking +royalty +##cca +154 +persia +terribly +magician +##rable +##unt +vance +cafeteria +booker +camille +warmer +##static +consume +cavern +gaps +compass +contemporaries +foyer +soothing +graveyard +maj +plunged +blush +##wear +cascade +demonstrates +ordinance +##nov +boyle +##lana +rockefeller +shaken +banjo +izzy +##ense +breathless +vines +##32 +##eman +alterations +chromosome +dwellings +feudal +mole +153 +catalonia +relics +tenant +mandated +##fm +fridge +hats +honesty +patented +raul +heap +cruisers +accusing +enlightenment +infants +wherein +chatham +contractors +zen +affinity +hc +osborne +piston +156 +traps +maturity +##rana +lagos +##zal +peering +##nay +attendant +dealers +protocols +subset +prospects +biographical +##cre +artery +##zers +insignia +nuns +endured +##eration +recommend +schwartz +serbs +berger +cromwell +crossroads +##ctor +enduring +clasped +grounded +##bine +marseille +twitched +abel +choke +https +catalyst +moldova +italians +##tist +disastrous +wee +##oured +##nti +wwf +nope +##piration +##asa +expresses +thumbs +167 +##nza +coca +1781 +cheating +##ption +skipped +sensory +heidelberg +spies +satan +dangers +semifinal +202 +bohemia +whitish +confusing +shipbuilding +relies +surgeons +landings +ravi +baku +moor +suffix +alejandro +##yana +litre +upheld +##unk +rajasthan +##rek +coaster +insists +posture +scenarios +etienne +favoured +appoint +transgender +elephants +poked +greenwood +defences +fulfilled +militant +somali +1758 +chalk +potent +##ucci +migrants +wink +assistants +nos +restriction +activism +niger +##ario +colon +shaun +##sat +daphne +##erated +swam +congregations +reprise +considerations +magnet +playable +xvi +##р +overthrow +tobias +knob +chavez +coding +##mers +propped +katrina +orient +newcomer +##suke +temperate +##pool +farmhouse +interrogation +##vd +committing +##vert +forthcoming +strawberry +joaquin +macau +ponds +shocking +siberia +##cellular +chant +contributors +##nant +##ologists +sped +absorb +hail +1782 +spared +##hore +barbados +karate +opus +originates +saul +##xie +evergreen +leaped +##rock +correlation +exaggerated +weekday +unification +bump +tracing +brig +afb +pathways +utilizing +##ners +mod +mb +disturbance +kneeling +##stad +##guchi +100th +pune +##thy +decreasing +168 +manipulation +miriam +academia +ecosystem +occupational +rbi +##lem +rift +##14 +rotary +stacked +incorporation +awakening +generators +guerrero +racist +##omy +cyber +derivatives +culminated +allie +annals +panzer +sainte +wikipedia +pops +zu +austro +##vate +algerian +politely +nicholson +mornings +educate +tastes +thrill +dartmouth +##gating +db +##jee +regan +differing +concentrating +choreography +divinity +##media +pledged +alexandre +routing +gregor +madeline +##idal +apocalypse +##hora +gunfire +culminating +elves +fined +liang +lam +programmed +tar +guessing +transparency +gabrielle +##gna +cancellation 
+flexibility +##lining +accession +shea +stronghold +nets +specializes +##rgan +abused +hasan +sgt +ling +exceeding +##₄ +admiration +supermarket +##ark +photographers +specialised +tilt +resonance +hmm +perfume +380 +sami +threatens +garland +botany +guarding +boiled +greet +puppy +russo +supplier +wilmington +vibrant +vijay +##bius +paralympic +grumbled +paige +faa +licking +margins +hurricanes +##gong +fest +grenade +ripping +##uz +counseling +weigh +##sian +needles +wiltshire +edison +costly +##not +fulton +tramway +redesigned +staffordshire +cache +gasping +watkins +sleepy +candidacy +##group +monkeys +timeline +throbbing +##bid +##sos +berth +uzbekistan +vanderbilt +bothering +overturned +ballots +gem +##iger +sunglasses +subscribers +hooker +compelling +ang +exceptionally +saloon +stab +##rdi +carla +terrifying +rom +##vision +coil +##oids +satisfying +vendors +31st +mackay +deities +overlooked +ambient +bahamas +felipe +olympia +whirled +botanist +advertised +tugging +##dden +disciples +morales +unionist +rites +foley +morse +motives +creepy +##₀ +soo +##sz +bargain +highness +frightening +turnpike +tory +reorganization +##cer +depict +biographer +##walk +unopposed +manifesto +##gles +institut +emile +accidental +kapoor +##dam +kilkenny +cortex +lively +##13 +romanesque +jain +shan +cannons +##ood +##ske +petrol +echoing +amalgamated +disappears +cautious +proposes +sanctions +trenton +##ر +flotilla +aus +contempt +tor +canary +cote +theirs +##hun +conceptual +deleted +fascinating +paso +blazing +elf +honourable +hutchinson +##eiro +##outh +##zin +surveyor +tee +amidst +wooded +reissue +intro +##ono +cobb +shelters +newsletter +hanson +brace +encoding +confiscated +dem +caravan +marino +scroll +melodic +cows +imam +##adi +##aneous +northward +searches +biodiversity +cora +310 +roaring +##bers +connell +theologian +halo +compose +pathetic +unmarried +dynamo +##oot +az +calculation +toulouse +deserves +humour +nr +forgiveness +tam +undergone +martyr +pamela +myths +whore +counselor +hicks +290 +heavens +battleship +electromagnetic +##bbs +stellar +establishments +presley +hopped +##chin +temptation +90s +wills +nas +##yuan +nhs +##nya +seminars +##yev +adaptations +gong +asher +lex +indicator +sikh +tobago +cites +goin +##yte +satirical +##gies +characterised +correspond +bubbles +lure +participates +##vid +eruption +skate +therapeutic +1785 +canals +wholesale +defaulted +sac +460 +petit +##zzled +virgil +leak +ravens +256 +portraying +##yx +ghetto +creators +dams +portray +vicente +##rington +fae +namesake +bounty +##arium +joachim +##ota +##iser +aforementioned +axle +snout +depended +dismantled +reuben +480 +##ibly +gallagher +##lau +##pd +earnest +##ieu +##iary +inflicted +objections +##llar +asa +gritted +##athy +jericho +##sea +##was +flick +underside +ceramics +undead +substituted +195 +eastward +undoubtedly +wheeled +chimney +##iche +guinness +cb +##ager +siding +##bell +traitor +baptiste +disguised +inauguration +149 +tipperary +choreographer +perched +warmed +stationary +eco +##ike +##ntes +bacterial +##aurus +flores +phosphate +##core +attacker +invaders +alvin +intersects +a1 +indirectly +immigrated +businessmen +cornelius +valves +narrated +pill +sober +ul +nationale +monastic +applicants +scenery +##jack +161 +motifs +constitutes +cpu +##osh +jurisdictions +sd +tuning +irritation +woven +##uddin +fertility +gao +##erie +antagonist +impatient +glacial +hides +boarded +denominations +interception +##jas +cookie +nicola +##tee +algebraic +marquess +bahn +parole +buyers 
+bait +turbines +paperwork +bestowed +natasha +renee +oceans +purchases +157 +vaccine +215 +##tock +fixtures +playhouse +integrate +jai +oswald +intellectuals +##cky +booked +nests +mortimer +##isi +obsession +sept +##gler +##sum +440 +scrutiny +simultaneous +squinted +##shin +collects +oven +shankar +penned +remarkably +##я +slips +luggage +spectral +1786 +collaborations +louie +consolidation +##ailed +##ivating +420 +hoover +blackpool +harness +ignition +vest +tails +belmont +mongol +skinner +##nae +visually +mage +derry +##tism +##unce +stevie +transitional +##rdy +redskins +drying +prep +prospective +##21 +annoyance +oversee +##loaded +fills +##books +##iki +announces +fda +scowled +respects +prasad +mystic +tucson +##vale +revue +springer +bankrupt +1772 +aristotle +salvatore +habsburg +##geny +dal +natal +nut +pod +chewing +darts +moroccan +walkover +rosario +lenin +punjabi +##ße +grossed +scattering +wired +invasive +hui +polynomial +corridors +wakes +gina +portrays +##cratic +arid +retreating +erich +irwin +sniper +##dha +linen +lindsey +maneuver +butch +shutting +socio +bounce +commemorative +postseason +jeremiah +pines +275 +mystical +beads +bp +abbas +furnace +bidding +consulted +assaulted +empirical +rubble +enclosure +sob +weakly +cancel +polly +yielded +##emann +curly +prediction +battered +70s +vhs +jacqueline +render +sails +barked +detailing +grayson +riga +sloane +raging +##yah +herbs +bravo +##athlon +alloy +giggle +imminent +suffers +assumptions +waltz +##itate +accomplishments +##ited +bathing +remixed +deception +prefix +##emia +deepest +##tier +##eis +balkan +frogs +##rong +slab +##pate +philosophers +peterborough +grains +imports +dickinson +rwanda +##atics +1774 +dirk +lan +tablets +##rove +clone +##rice +caretaker +hostilities +mclean +##gre +regimental +treasures +norms +impose +tsar +tango +diplomacy +variously +complain +192 +recognise +arrests +1779 +celestial +pulitzer +##dus +bing +libretto +##moor +adele +splash +##rite +expectation +lds +confronts +##izer +spontaneous +harmful +wedge +entrepreneurs +buyer +##ope +bilingual +translate +rugged +conner +circulated +uae +eaton +##gra +##zzle +lingered +lockheed +vishnu +reelection +alonso +##oom +joints +yankee +headline +cooperate +heinz +laureate +invading +##sford +echoes +scandinavian +##dham +hugging +vitamin +salute +micah +hind +trader +##sper +radioactive +##ndra +militants +poisoned +ratified +remark +campeonato +deprived +wander +prop +##dong +outlook +##tani +##rix +##eye +chiang +darcy +##oping +mandolin +spice +statesman +babylon +182 +walled +forgetting +afro +##cap +158 +giorgio +buffer +##polis +planetary +##gis +overlap +terminals +kinda +centenary +##bir +arising +manipulate +elm +ke +1770 +ak +##tad +chrysler +mapped +moose +pomeranian +quad +macarthur +assemblies +shoreline +recalls +stratford +##rted +noticeable +##evic +imp +##rita +##sque +accustomed +supplying +tents +disgusted +vogue +sipped +filters +khz +reno +selecting +luftwaffe +mcmahon +tyne +masterpiece +carriages +collided +dunes +exercised +flare +remembers +muzzle +##mobile +heck +##rson +burgess +lunged +middleton +boycott +bilateral +##sity +hazardous +lumpur +multiplayer +spotlight +jackets +goldman +liege +porcelain +rag +waterford +benz +attracts +hopeful +battling +ottomans +kensington +baked +hymns +cheyenne +lattice +levine +borrow +polymer +clashes +michaels +monitored +commitments +denounced +##25 +##von +cavity +##oney +hobby +akin +##holders +futures +intricate +cornish +patty +##oned +illegally +dolphin +##lag 
+barlow +yellowish +maddie +apologized +luton +plagued +##puram +nana +##rds +sway +fanny +łodz +##rino +psi +suspicions +hanged +##eding +initiate +charlton +##por +nak +competent +235 +analytical +annex +wardrobe +reservations +##rma +sect +162 +fairfax +hedge +piled +buckingham +uneven +bauer +simplicity +snyder +interpret +accountability +donors +moderately +byrd +continents +##cite +##max +disciple +hr +jamaican +ping +nominees +##uss +mongolian +diver +attackers +eagerly +ideological +pillows +miracles +apartheid +revolver +sulfur +clinics +moran +163 +##enko +ile +katy +rhetoric +##icated +chronology +recycling +##hrer +elongated +mughal +pascal +profiles +vibration +databases +domination +##fare +##rant +matthias +digest +rehearsal +polling +weiss +initiation +reeves +clinging +flourished +impress +ngo +##hoff +##ume +buckley +symposium +rhythms +weed +emphasize +transforming +##taking +##gence +##yman +accountant +analyze +flicker +foil +priesthood +voluntarily +decreases +##80 +##hya +slater +sv +charting +mcgill +##lde +moreno +##iu +besieged +zur +robes +##phic +admitting +api +deported +turmoil +peyton +earthquakes +##ares +nationalists +beau +clair +brethren +interrupt +welch +curated +galerie +requesting +164 +##ested +impending +steward +viper +##vina +complaining +beautifully +brandy +foam +nl +1660 +##cake +alessandro +punches +laced +explanations +##lim +attribute +clit +reggie +discomfort +##cards +smoothed +whales +##cene +adler +countered +duffy +disciplinary +widening +recipe +reliance +conducts +goats +gradient +preaching +##shaw +matilda +quasi +striped +meridian +cannabis +cordoba +certificates +##agh +##tering +graffiti +hangs +pilgrims +repeats +##ych +revive +urine +etat +##hawk +fueled +belts +fuzzy +susceptible +##hang +mauritius +salle +sincere +beers +hooks +##cki +arbitration +entrusted +advise +sniffed +seminar +junk +donnell +processors +principality +strapped +celia +mendoza +everton +fortunes +prejudice +starving +reassigned +steamer +##lund +tuck +evenly +foreman +##ffen +dans +375 +envisioned +slit +##xy +baseman +liberia +rosemary +##weed +electrified +periodically +potassium +stride +contexts +sperm +slade +mariners +influx +bianca +subcommittee +##rane +spilling +icao +estuary +##nock +delivers +iphone +##ulata +isa +mira +bohemian +dessert +##sbury +welcoming +proudly +slowing +##chs +musee +ascension +russ +##vian +waits +##psy +africans +exploit +##morphic +gov +eccentric +crab +peck +##ull +entrances +formidable +marketplace +groom +bolted +metabolism +patton +robbins +courier +payload +endure +##ifier +andes +refrigerator +##pr +ornate +##uca +ruthless +illegitimate +masonry +strasbourg +bikes +adobe +##³ +apples +quintet +willingly +niche +bakery +corpses +energetic +##cliffe +##sser +##ards +177 +centimeters +centro +fuscous +cretaceous +rancho +##yde +andrei +telecom +tottenham +oasis +ordination +vulnerability +presiding +corey +cp +penguins +sims +##pis +malawi +piss +##48 +correction +##cked +##ffle +##ryn +countdown +detectives +psychiatrist +psychedelic +dinosaurs +blouse +##get +choi +vowed +##oz +randomly +##pol +49ers +scrub +blanche +bruins +dusseldorf +##using +unwanted +##ums +212 +dominique +elevations +headlights +om +laguna +##oga +1750 +famously +ignorance +shrewsbury +##aine +ajax +breuning +che +confederacy +greco +overhaul +##screen +paz +skirts +disagreement +cruelty +jagged +phoebe +shifter +hovered +viruses +##wes +mandy +##lined +##gc +landlord +squirrel +dashed +##ι +ornamental +gag +wally +grange +literal +spurs 
+undisclosed +proceeding +yin +##text +billie +orphan +spanned +humidity +indy +weighted +presentations +explosions +lucian +##tary +vaughn +hindus +##anga +##hell +psycho +171 +daytona +protects +efficiently +rematch +sly +tandem +##oya +rebranded +impaired +hee +metropolis +peach +godfrey +diaspora +ethnicity +prosperous +gleaming +dar +grossing +playback +##rden +stripe +pistols +##tain +births +labelled +##cating +172 +rudy +alba +##onne +aquarium +hostility +##gb +##tase +shudder +sumatra +hardest +lakers +consonant +creeping +demos +homicide +capsule +zeke +liberties +expulsion +pueblo +##comb +trait +transporting +##ddin +##neck +##yna +depart +gregg +mold +ledge +hangar +oldham +playboy +termination +analysts +gmbh +romero +##itic +insist +cradle +filthy +brightness +slash +shootout +deposed +bordering +##truct +isis +microwave +tumbled +sheltered +cathy +werewolves +messy +andersen +convex +clapped +clinched +satire +wasting +edo +vc +rufus +##jak +mont +##etti +poznan +##keeping +restructuring +transverse +##rland +azerbaijani +slovene +gestures +roommate +choking +shear +##quist +vanguard +oblivious +##hiro +disagreed +baptism +##lich +coliseum +##aceae +salvage +societe +cory +locke +relocation +relying +versailles +ahl +swelling +##elo +cheerful +##word +##edes +gin +sarajevo +obstacle +diverted +##nac +messed +thoroughbred +fluttered +utrecht +chewed +acquaintance +assassins +dispatch +mirza +##wart +nike +salzburg +swell +yen +##gee +idle +ligue +samson +##nds +##igh +playful +spawned +##cise +tease +##case +burgundy +##bot +stirring +skeptical +interceptions +marathi +##dies +bedrooms +aroused +pinch +##lik +preferences +tattoos +buster +digitally +projecting +rust +##ital +kitten +priorities +addison +pseudo +##guard +dusk +icons +sermon +##psis +##iba +bt +##lift +##xt +ju +truce +rink +##dah +##wy +defects +psychiatry +offences +calculate +glucose +##iful +##rized +##unda +francaise +##hari +richest +warwickshire +carly +1763 +purity +redemption +lending +##cious +muse +bruises +cerebral +aero +carving +##name +preface +terminology +invade +monty +##int +anarchist +blurred +##iled +rossi +treats +guts +shu +foothills +ballads +undertaking +premise +cecilia +affiliates +blasted +conditional +wilder +minors +drone +rudolph +buffy +swallowing +horton +attested +##hop +rutherford +howell +primetime +livery +penal +##bis +minimize +hydro +wrecked +wrought +palazzo +##gling +cans +vernacular +friedman +nobleman +shale +walnut +danielle +##ection +##tley +sears +##kumar +chords +lend +flipping +streamed +por +dracula +gallons +sacrifices +gamble +orphanage +##iman +mckenzie +##gible +boxers +daly +##balls +##ان +208 +##ific +##rative +##iq +exploited +slated +##uity +circling +hillary +pinched +goldberg +provost +campaigning +lim +piles +ironically +jong +mohan +successors +usaf +##tem +##ught +autobiographical +haute +preserves +##ending +acquitted +comparisons +203 +hydroelectric +gangs +cypriot +torpedoes +rushes +chrome +derive +bumps +instability +fiat +pets +##mbe +silas +dye +reckless +settler +##itation +info +heats +##writing +176 +canonical +maltese +fins +mushroom +stacy +aspen +avid +##kur +##loading +vickers +gaston +hillside +statutes +wilde +gail +kung +sabine +comfortably +motorcycles +##rgo +169 +pneumonia +fetch +##sonic +axel +faintly +parallels +##oop +mclaren +spouse +compton +interdisciplinary +miner +##eni +181 +clamped +##chal +##llah +separates +versa +##mler +scarborough +labrador +##lity +##osing +rutgers +hurdles +como +166 +burt +divers +##100 
+wichita +cade +coincided +##erson +bruised +mla +##pper +vineyard +##ili +##brush +notch +mentioning +jase +hearted +kits +doe +##acle +pomerania +##ady +ronan +seizure +pavel +problematic +##zaki +domenico +##ulin +catering +penelope +dependence +parental +emilio +ministerial +atkinson +##bolic +clarkson +chargers +colby +grill +peeked +arises +summon +##aged +fools +##grapher +faculties +qaeda +##vial +garner +refurbished +##hwa +geelong +disasters +nudged +bs +shareholder +lori +algae +reinstated +rot +##ades +##nous +invites +stainless +183 +inclusive +##itude +diocesan +til +##icz +denomination +##xa +benton +floral +registers +##ider +##erman +##kell +absurd +brunei +guangzhou +hitter +retaliation +##uled +##eve +blanc +nh +consistency +contamination +##eres +##rner +dire +palermo +broadcasters +diaries +inspire +vols +brewer +tightening +ky +mixtape +hormone +##tok +stokes +##color +##dly +##ssi +pg +##ometer +##lington +sanitation +##tility +intercontinental +apps +##adt +¹⁄₂ +cylinders +economies +favourable +unison +croix +gertrude +odyssey +vanity +dangling +##logists +upgrades +dice +middleweight +practitioner +##ight +206 +henrik +parlor +orion +angered +lac +python +blurted +##rri +sensual +intends +swings +angled +##phs +husky +attain +peerage +precinct +textiles +cheltenham +shuffled +dai +confess +tasting +bhutan +##riation +tyrone +segregation +abrupt +ruiz +##rish +smirked +blackwell +confidential +browning +amounted +##put +vase +scarce +fabulous +raided +staple +guyana +unemployed +glider +shay +##tow +carmine +troll +intervene +squash +superstar +##uce +cylindrical +len +roadway +researched +handy +##rium +##jana +meta +lao +declares +##rring +##tadt +##elin +##kova +willem +shrubs +napoleonic +realms +skater +qi +volkswagen +##ł +tad +hara +archaeologist +awkwardly +eerie +##kind +wiley +##heimer +##24 +titus +organizers +cfl +crusaders +lama +usb +vent +enraged +thankful +occupants +maximilian +##gaard +possessing +textbooks +##oran +collaborator +quaker +##ulo +avalanche +mono +silky +straits +isaiah +mustang +surged +resolutions +potomac +descend +cl +kilograms +plato +strains +saturdays +##olin +bernstein +##ype +holstein +ponytail +##watch +belize +conversely +heroine +perpetual +##ylus +charcoal +piedmont +glee +negotiating +backdrop +prologue +##jah +##mmy +pasadena +climbs +ramos +sunni +##holm +##tner +##tri +anand +deficiency +hertfordshire +stout +##avi +aperture +orioles +##irs +doncaster +intrigued +bombed +coating +otis +##mat +cocktail +##jit +##eto +amir +arousal +sar +##proof +##act +##ories +dixie +pots +##bow +whereabouts +159 +##fted +drains +bullying +cottages +scripture +coherent +fore +poe +appetite +##uration +sampled +##ators +##dp +derrick +rotor +jays +peacock +installment +##rro +advisors +##coming +rodeo +scotch +##mot +##db +##fen +##vant +ensued +rodrigo +dictatorship +martyrs +twenties +##н +towed +incidence +marta +rainforest +sai +scaled +##cles +oceanic +qualifiers +symphonic +mcbride +dislike +generalized +aubrey +colonization +##iation +##lion +##ssing +disliked +lublin +salesman +##ulates +spherical +whatsoever +sweating +avalon +contention +punt +severity +alderman +atari +##dina +##grant +##rop +scarf +seville +vertices +annexation +fairfield +fascination +inspiring +launches +palatinate +regretted +##rca +feral +##iom +elk +nap +olsen +reddy +yong +##leader +##iae +garment +transports +feng +gracie +outrage +viceroy +insides +##esis +breakup +grady +organizer +softer +grimaced +222 +murals +galicia +arranging +vectors 
+##rsten +bas +##sb +##cens +sloan +##eka +bitten +ara +fender +nausea +bumped +kris +banquet +comrades +detector +persisted +##llan +adjustment +endowed +cinemas +##shot +sellers +##uman +peek +epa +kindly +neglect +simpsons +talon +mausoleum +runaway +hangul +lookout +##cic +rewards +coughed +acquainted +chloride +##ald +quicker +accordion +neolithic +##qa +artemis +coefficient +lenny +pandora +tx +##xed +ecstasy +litter +segunda +chairperson +gemma +hiss +rumor +vow +nasal +antioch +compensate +patiently +transformers +##eded +judo +morrow +penis +posthumous +philips +bandits +husbands +denote +flaming +##any +##phones +langley +yorker +1760 +walters +##uo +##kle +gubernatorial +fatty +samsung +leroy +outlaw +##nine +unpublished +poole +jakob +##ᵢ +##ₙ +crete +distorted +superiority +##dhi +intercept +crust +mig +claus +crashes +positioning +188 +stallion +301 +frontal +armistice +##estinal +elton +aj +encompassing +camel +commemorated +malaria +woodward +calf +cigar +penetrate +##oso +willard +##rno +##uche +illustrate +amusing +convergence +noteworthy +##lma +##rva +journeys +realise +manfred +##sable +410 +##vocation +hearings +fiance +##posed +educators +provoked +adjusting +##cturing +modular +stockton +paterson +vlad +rejects +electors +selena +maureen +##tres +uber +##rce +swirled +##num +proportions +nanny +pawn +naturalist +parma +apostles +awoke +ethel +wen +##bey +monsoon +overview +##inating +mccain +rendition +risky +adorned +##ih +equestrian +germain +nj +conspicuous +confirming +##yoshi +shivering +##imeter +milestone +rumours +flinched +bounds +smacked +token +##bei +lectured +automobiles +##shore +impacted +##iable +nouns +nero +##leaf +ismail +prostitute +trams +##lace +bridget +sud +stimulus +impressions +reins +revolves +##oud +##gned +giro +honeymoon +##swell +criterion +##sms +##uil +libyan +prefers +##osition +211 +preview +sucks +accusation +bursts +metaphor +diffusion +tolerate +faye +betting +cinematographer +liturgical +specials +bitterly +humboldt +##ckle +flux +rattled +##itzer +archaeologists +odor +authorised +marshes +discretion +##ов +alarmed +archaic +inverse +##leton +explorers +##pine +drummond +tsunami +woodlands +##minate +##tland +booklet +insanity +owning +insert +crafted +calculus +##tore +receivers +##bt +stung +##eca +##nched +prevailing +travellers +eyeing +lila +graphs +##borne +178 +julien +##won +morale +adaptive +therapist +erica +cw +libertarian +bowman +pitches +vita +##ional +crook +##ads +##entation +caledonia +mutiny +##sible +1840s +automation +##ß +flock +##pia +ironic +pathology +##imus +remarried +##22 +joker +withstand +energies +##att +shropshire +hostages +madeleine +tentatively +conflicting +mateo +recipes +euros +ol +mercenaries +nico +##ndon +albuquerque +augmented +mythical +bel +freud +##child +cough +##lica +365 +freddy +lillian +genetically +nuremberg +calder +209 +bonn +outdoors +paste +suns +urgency +vin +restraint +tyson +##cera +##selle +barrage +bethlehem +kahn +##par +mounts +nippon +barony +happier +ryu +makeshift +sheldon +blushed +castillo +barking +listener +taped +bethel +fluent +headlines +pornography +rum +disclosure +sighing +mace +doubling +gunther +manly +##plex +rt +interventions +physiological +forwards +emerges +##tooth +##gny +compliment +rib +recession +visibly +barge +faults +connector +exquisite +prefect +##rlin +patio +##cured +elevators +brandt +italics +pena +173 +wasp +satin +ea +botswana +graceful +respectable +##jima +##rter +##oic +franciscan +generates +##dl +alfredo +disgusting +##olate 
+##iously +sherwood +warns +cod +promo +cheryl +sino +##ة +##escu +twitch +##zhi +brownish +thom +ortiz +##dron +densely +##beat +carmel +reinforce +##bana +187 +anastasia +downhill +vertex +contaminated +remembrance +harmonic +homework +##sol +fiancee +gears +olds +angelica +loft +ramsay +quiz +colliery +sevens +##cape +autism +##hil +walkway +##boats +ruben +abnormal +ounce +khmer +##bbe +zachary +bedside +morphology +punching +##olar +sparrow +convinces +##35 +hewitt +queer +remastered +rods +mabel +solemn +notified +lyricist +symmetric +##xide +174 +encore +passports +wildcats +##uni +baja +##pac +mildly +##ease +bleed +commodity +mounds +glossy +orchestras +##omo +damian +prelude +ambitions +##vet +awhile +remotely +##aud +asserts +imply +##iques +distinctly +modelling +remedy +##dded +windshield +dani +xiao +##endra +audible +powerplant +1300 +invalid +elemental +acquisitions +##hala +immaculate +libby +plata +smuggling +ventilation +denoted +minh +##morphism +430 +differed +dion +kelley +lore +mocking +sabbath +spikes +hygiene +drown +runoff +stylized +tally +liberated +aux +interpreter +righteous +aba +siren +reaper +pearce +millie +##cier +##yra +gaius +##iso +captures +##ttering +dorm +claudio +##sic +benches +knighted +blackness +##ored +discount +fumble +oxidation +routed +##ς +novak +perpendicular +spoiled +fracture +splits +##urt +pads +topology +##cats +axes +fortunate +offenders +protestants +esteem +221 +broadband +convened +frankly +hound +prototypes +isil +facilitated +keel +##sher +sahara +awaited +bubba +orb +prosecutors +186 +hem +520 +##xing +relaxing +remnant +romney +sorted +slalom +stefano +ulrich +##active +exemption +folder +pauses +foliage +hitchcock +epithet +204 +criticisms +##aca +ballistic +brody +hinduism +chaotic +youths +equals +##pala +pts +thicker +analogous +capitalist +improvised +overseeing +sinatra +ascended +beverage +##tl +straightforward +##kon +curran +##west +bois +325 +induce +surveying +emperors +sax +unpopular +##kk +cartoonist +fused +##mble +unto +##yuki +localities +##cko +##ln +darlington +slain +academie +lobbying +sediment +puzzles +##grass +defiance +dickens +manifest +tongues +alumnus +arbor +coincide +184 +appalachian +mustafa +examiner +cabaret +traumatic +yves +bracelet +draining +heroin +magnum +baths +odessa +consonants +mitsubishi +##gua +kellan +vaudeville +##fr +joked +null +straps +probation +##ław +ceded +interfaces +##pas +##zawa +blinding +viet +224 +rothschild +museo +640 +huddersfield +##vr +tactic +##storm +brackets +dazed +incorrectly +##vu +reg +glazed +fearful +manifold +benefited +irony +##sun +stumbling +##rte +willingness +balkans +mei +wraps +##aba +injected +##lea +gu +syed +harmless +##hammer +bray +takeoff +poppy +timor +cardboard +astronaut +purdue +weeping +southbound +cursing +stalls +diagonal +##neer +lamar +bryce +comte +weekdays +harrington +##uba +negatively +##see +lays +grouping +##cken +##henko +affirmed +halle +modernist +##lai +hodges +smelling +aristocratic +baptized +dismiss +justification +oilers +##now +coupling +qin +snack +healer +##qing +gardener +layla +battled +formulated +stephenson +gravitational +##gill +##jun +1768 +granny +coordinating +suites +##cd +##ioned +monarchs +##cote +##hips +sep +blended +apr +barrister +deposition +fia +mina +policemen +paranoid +##pressed +churchyard +covert +crumpled +creep +abandoning +tr +transmit +conceal +barr +understands +readiness +spire +##cology +##enia +##erry +610 +startling +unlock +vida +bowled +slots +##nat +##islav +spaced +trusting 
+admire +rig +##ink +slack +##70 +mv +207 +casualty +##wei +classmates +##odes +##rar +##rked +amherst +furnished +evolve +foundry +menace +mead +##lein +flu +wesleyan +##kled +monterey +webber +##vos +wil +##mith +##на +bartholomew +justices +restrained +##cke +amenities +191 +mediated +sewage +trenches +ml +mainz +##thus +1800s +##cula +##inski +caine +bonding +213 +converts +spheres +superseded +marianne +crypt +sweaty +ensign +historia +##br +spruce +##post +##ask +forks +thoughtfully +yukon +pamphlet +ames +##uter +karma +##yya +bryn +negotiation +sighs +incapable +##mbre +##ntial +actresses +taft +##mill +luce +prevailed +##amine +1773 +motionless +envoy +testify +investing +sculpted +instructors +provence +kali +cullen +horseback +##while +goodwin +##jos +gaa +norte +##ldon +modify +wavelength +abd +214 +skinned +sprinter +forecast +scheduling +marries +squared +tentative +##chman +boer +##isch +bolts +swap +fisherman +assyrian +impatiently +guthrie +martins +murdoch +194 +tanya +nicely +dolly +lacy +med +##45 +syn +decks +fashionable +millionaire +##ust +surfing +##ml +##ision +heaved +tammy +consulate +attendees +routinely +197 +fuse +saxophonist +backseat +malaya +##lord +scowl +tau +##ishly +193 +sighted +steaming +##rks +303 +911 +##holes +##hong +ching +##wife +bless +conserved +jurassic +stacey +unix +zion +chunk +rigorous +blaine +198 +peabody +slayer +dismay +brewers +nz +##jer +det +##glia +glover +postwar +int +penetration +sylvester +imitation +vertically +airlift +heiress +knoxville +viva +##uin +390 +macon +##rim +##fighter +##gonal +janice +##orescence +##wari +marius +belongings +leicestershire +196 +blanco +inverted +preseason +sanity +sobbing +##due +##elt +##dled +collingwood +regeneration +flickering +shortest +##mount +##osi +feminism +##lat +sherlock +cabinets +fumbled +northbound +precedent +snaps +##mme +researching +##akes +guillaume +insights +manipulated +vapor +neighbour +sap +gangster +frey +f1 +stalking +scarcely +callie +barnett +tendencies +audi +doomed +assessing +slung +panchayat +ambiguous +bartlett +##etto +distributing +violating +wolverhampton +##hetic +swami +histoire +##urus +liable +pounder +groin +hussain +larsen +popping +surprises +##atter +vie +curt +##station +mute +relocate +musicals +authorization +richter +##sef +immortality +tna +bombings +##press +deteriorated +yiddish +##acious +robbed +colchester +cs +pmid +ao +verified +balancing +apostle +swayed +recognizable +oxfordshire +retention +nottinghamshire +contender +judd +invitational +shrimp +uhf +##icient +cleaner +longitudinal +tanker +##mur +acronym +broker +koppen +sundance +suppliers +##gil +4000 +clipped +fuels +petite +##anne +landslide +helene +diversion +populous +landowners +auspices +melville +quantitative +##xes +ferries +nicky +##llus +doo +haunting +roche +carver +downed +unavailable +##pathy +approximation +hiroshima +##hue +garfield +valle +comparatively +keyboardist +traveler +##eit +congestion +calculating +subsidiaries +##bate +serb +modernization +fairies +deepened +ville +averages +##lore +inflammatory +tonga +##itch +co₂ +squads +##hea +gigantic +serum +enjoyment +retailer +verona +35th +cis +##phobic +magna +technicians +##vati +arithmetic +##sport +levin +##dation +amtrak +chow +sienna +##eyer +backstage +entrepreneurship +##otic +learnt +tao +##udy +worcestershire +formulation +baggage +hesitant +bali +sabotage +##kari +barren +enhancing +murmur +pl +freshly +putnam +syntax +aces +medicines +resentment +bandwidth +##sier +grins +chili +guido +##sei +framing 
+implying +gareth +lissa +genevieve +pertaining +admissions +geo +thorpe +proliferation +sato +bela +analyzing +parting +##gor +awakened +##isman +huddled +secrecy +##kling +hush +gentry +540 +dungeons +##ego +coasts +##utz +sacrificed +##chule +landowner +mutually +prevalence +programmer +adolescent +disrupted +seaside +gee +trusts +vamp +georgie +##nesian +##iol +schedules +sindh +##market +etched +hm +sparse +bey +beaux +scratching +gliding +unidentified +216 +collaborating +gems +jesuits +oro +accumulation +shaping +mbe +anal +##xin +231 +enthusiasts +newscast +##egan +janata +dewey +parkinson +179 +ankara +biennial +towering +dd +inconsistent +950 +##chet +thriving +terminate +cabins +furiously +eats +advocating +donkey +marley +muster +phyllis +leiden +##user +grassland +glittering +iucn +loneliness +217 +memorandum +armenians +##ddle +popularized +rhodesia +60s +lame +##illon +sans +bikini +header +orbits +##xx +##finger +##ulator +sharif +spines +biotechnology +strolled +naughty +yates +##wire +fremantle +milo +##mour +abducted +removes +##atin +humming +wonderland +##chrome +##ester +hume +pivotal +##rates +armand +grams +believers +elector +rte +apron +bis +scraped +##yria +endorsement +initials +##llation +eps +dotted +hints +buzzing +emigration +nearer +##tom +indicators +##ulu +coarse +neutron +protectorate +##uze +directional +exploits +pains +loire +1830s +proponents +guggenheim +rabbits +ritchie +305 +hectare +inputs +hutton +##raz +verify +##ako +boilers +longitude +##lev +skeletal +yer +emilia +citrus +compromised +##gau +pokemon +prescription +paragraph +eduard +cadillac +attire +categorized +kenyan +weddings +charley +##bourg +entertain +monmouth +##lles +nutrients +davey +mesh +incentive +practised +ecosystems +kemp +subdued +overheard +##rya +bodily +maxim +##nius +apprenticeship +ursula +##fight +lodged +rug +silesian +unconstitutional +patel +inspected +coyote +unbeaten +##hak +34th +disruption +convict +parcel +##cl +##nham +collier +implicated +mallory +##iac +##lab +susannah +winkler +##rber +shia +phelps +sediments +graphical +robotic +##sner +adulthood +mart +smoked +##isto +kathryn +clarified +##aran +divides +convictions +oppression +pausing +burying +##mt +federico +mathias +eileen +##tana +kite +hunched +##acies +189 +##atz +disadvantage +liza +kinetic +greedy +paradox +yokohama +dowager +trunks +ventured +##gement +gupta +vilnius +olaf +##thest +crimean +hopper +##ej +progressively +arturo +mouthed +arrondissement +##fusion +rubin +simulcast +oceania +##orum +##stra +##rred +busiest +intensely +navigator +cary +##vine +##hini +##bies +fife +rowe +rowland +posing +insurgents +shafts +lawsuits +activate +conor +inward +culturally +garlic +265 +##eering +eclectic +##hui +##kee +##nl +furrowed +vargas +meteorological +rendezvous +##aus +culinary +commencement +##dition +quota +##notes +mommy +salaries +overlapping +mule +##iology +##mology +sums +wentworth +##isk +##zione +mainline +subgroup +##illy +hack +plaintiff +verdi +bulb +differentiation +engagements +multinational +supplemented +bertrand +caller +regis +##naire +##sler +##arts +##imated +blossom +propagation +kilometer +viaduct +vineyards +##uate +beckett +optimization +golfer +songwriters +seminal +semitic +thud +volatile +evolving +ridley +##wley +trivial +distributions +scandinavia +jiang +##ject +wrestled +insistence +##dio +emphasizes +napkin +##ods +adjunct +rhyme +##ricted +##eti +hopeless +surrounds +tremble +32nd +smoky +##ntly +oils +medicinal +padded +steer +wilkes +219 +255 +concessions 
+hue +uniquely +blinded +landon +yahoo +##lane +hendrix +commemorating +dex +specify +chicks +##ggio +intercity +1400 +morley +##torm +highlighting +##oting +pang +oblique +stalled +##liner +flirting +newborn +1769 +bishopric +shaved +232 +currie +##ush +dharma +spartan +##ooped +favorites +smug +novella +sirens +abusive +creations +espana +##lage +paradigm +semiconductor +sheen +##rdo +##yen +##zak +nrl +renew +##pose +##tur +adjutant +marches +norma +##enity +ineffective +weimar +grunt +##gat +lordship +plotting +expenditure +infringement +lbs +refrain +av +mimi +mistakenly +postmaster +1771 +##bara +ras +motorsports +tito +199 +subjective +##zza +bully +stew +##kaya +prescott +1a +##raphic +##zam +bids +styling +paranormal +reeve +sneaking +exploding +katz +akbar +migrant +syllables +indefinitely +##ogical +destroys +replaces +applause +##phine +pest +##fide +218 +articulated +bertie +##thing +##cars +##ptic +courtroom +crowley +aesthetics +cummings +tehsil +hormones +titanic +dangerously +##ibe +stadion +jaenelle +auguste +ciudad +##chu +mysore +partisans +##sio +lucan +philipp +##aly +debating +henley +interiors +##rano +##tious +homecoming +beyonce +usher +henrietta +prepares +weeds +##oman +ely +plucked +##pire +##dable +luxurious +##aq +artifact +password +pasture +juno +maddy +minsk +##dder +##ologies +##rone +assessments +martian +royalist +1765 +examines +##mani +##rge +nino +223 +parry +scooped +relativity +##eli +##uting +##cao +congregational +noisy +traverse +##agawa +strikeouts +nickelodeon +obituary +transylvania +binds +depictions +polk +trolley +##yed +##lard +breeders +##under +dryly +hokkaido +1762 +strengths +stacks +bonaparte +connectivity +neared +prostitutes +stamped +anaheim +gutierrez +sinai +##zzling +bram +fresno +madhya +##86 +proton +##lena +##llum +##phon +reelected +wanda +##anus +##lb +ample +distinguishing +##yler +grasping +sermons +tomato +bland +stimulation +avenues +##eux +spreads +scarlett +fern +pentagon +assert +baird +chesapeake +ir +calmed +distortion +fatalities +##olis +correctional +pricing +##astic +##gina +prom +dammit +ying +collaborate +##chia +welterweight +33rd +pointer +substitution +bonded +umpire +communicating +multitude +paddle +##obe +federally +intimacy +##insky +betray +ssr +##lett +##lean +##lves +##therapy +airbus +##tery +functioned +ud +bearer +biomedical +netflix +##hire +##nca +condom +brink +ik +##nical +macy +##bet +flap +gma +experimented +jelly +lavender +##icles +##ulia +munro +##mian +##tial +rye +##rle +60th +gigs +hottest +rotated +predictions +fuji +bu +##erence +##omi +barangay +##fulness +##sas +clocks +##rwood +##liness +cereal +roe +wight +decker +uttered +babu +onion +xml +forcibly +##df +petra +sarcasm +hartley +peeled +storytelling +##42 +##xley +##ysis +##ffa +fibre +kiel +auditor +fig +harald +greenville +##berries +geographically +nell +quartz +##athic +cemeteries +##lr +crossings +nah +holloway +reptiles +chun +sichuan +snowy +660 +corrections +##ivo +zheng +ambassadors +blacksmith +fielded +fluids +hardcover +turnover +medications +melvin +academies +##erton +ro +roach +absorbing +spaniards +colton +##founded +outsider +espionage +kelsey +245 +edible +##ulf +dora +establishes +##sham +##tries +contracting +##tania +cinematic +costello +nesting +##uron +connolly +duff +##nology +mma +##mata +fergus +sexes +gi +optics +spectator +woodstock +banning +##hee +##fle +differentiate +outfielder +refinery +226 +312 +gerhard +horde +lair +drastically +##udi +landfall +##cheng +motorsport +odi +##achi +predominant 
+quay +skins +##ental +edna +harshly +complementary +murdering +##aves +wreckage +##90 +ono +outstretched +lennox +munitions +galen +reconcile +470 +scalp +bicycles +gillespie +questionable +rosenberg +guillermo +hostel +jarvis +kabul +volvo +opium +yd +##twined +abuses +decca +outpost +##cino +sensible +neutrality +##64 +ponce +anchorage +atkins +turrets +inadvertently +disagree +libre +vodka +reassuring +weighs +##yal +glide +jumper +ceilings +repertory +outs +stain +##bial +envy +##ucible +smashing +heightened +policing +hyun +mixes +lai +prima +##ples +celeste +##bina +lucrative +intervened +kc +manually +##rned +stature +staffed +bun +bastards +nairobi +priced +##auer +thatcher +##kia +tripped +comune +##ogan +##pled +brasil +incentives +emanuel +hereford +musica +##kim +benedictine +biennale +##lani +eureka +gardiner +rb +knocks +sha +##ael +##elled +##onate +efficacy +ventura +masonic +sanford +maize +leverage +##feit +capacities +santana +##aur +novelty +vanilla +##cter +##tour +benin +##oir +##rain +neptune +drafting +tallinn +##cable +humiliation +##boarding +schleswig +fabian +bernardo +liturgy +spectacle +sweeney +pont +routledge +##tment +cosmos +ut +hilt +sleek +universally +##eville +##gawa +typed +##dry +favors +allegheny +glaciers +##rly +recalling +aziz +##log +parasite +requiem +auf +##berto +##llin +illumination +##breaker +##issa +festivities +bows +govern +vibe +vp +333 +sprawled +larson +pilgrim +bwf +leaping +##rts +##ssel +alexei +greyhound +hoarse +##dler +##oration +seneca +##cule +gaping +##ulously +##pura +cinnamon +##gens +##rricular +craven +fantasies +houghton +engined +reigned +dictator +supervising +##oris +bogota +commentaries +unnatural +fingernails +spirituality +tighten +##tm +canadiens +protesting +intentional +cheers +sparta +##ytic +##iere +##zine +widen +belgarath +controllers +dodd +iaaf +navarre +##ication +defect +squire +steiner +whisky +##mins +560 +inevitably +tome +##gold +chew +##uid +##lid +elastic +##aby +streaked +alliances +jailed +regal +##ined +##phy +czechoslovak +narration +absently +##uld +bluegrass +guangdong +quran +criticizing +hose +hari +##liest +##owa +skier +streaks +deploy +##lom +raft +bose +dialed +huff +##eira +haifa +simplest +bursting +endings +ib +sultanate +##titled +franks +whitman +ensures +sven +##ggs +collaborators +forster +organising +ui +banished +napier +injustice +teller +layered +thump +##otti +roc +battleships +evidenced +fugitive +sadie +robotics +##roud +equatorial +geologist +##iza +yielding +##bron +##sr +internationale +mecca +##diment +sbs +skyline +toad +uploaded +reflective +undrafted +lal +leafs +bayern +##dai +lakshmi +shortlisted +##stick +##wicz +camouflage +donate +af +christi +lau +##acio +disclosed +nemesis +1761 +assemble +straining +northamptonshire +tal +##asi +bernardino +premature +heidi +42nd +coefficients +galactic +reproduce +buzzed +sensations +zionist +monsieur +myrtle +##eme +archery +strangled +musically +viewpoint +antiquities +bei +trailers +seahawks +cured +pee +preferring +tasmanian +lange +sul +##mail +##working +colder +overland +lucivar +massey +gatherings +haitian +##smith +disapproval +flaws +##cco +##enbach +1766 +npr +##icular +boroughs +creole +forums +techno +1755 +dent +abdominal +streetcar +##eson +##stream +procurement +gemini +predictable +##tya +acheron +christoph +feeder +fronts +vendor +bernhard +jammu +tumors +slang +##uber +goaltender +twists +curving +manson +vuelta +mer +peanut +confessions +pouch +unpredictable +allowance +theodor +vascular +##factory 
+bala +authenticity +metabolic +coughing +nanjing +##cea +pembroke +##bard +splendid +36th +ff +hourly +##ahu +elmer +handel +##ivate +awarding +thrusting +dl +experimentation +##hesion +##46 +caressed +entertained +steak +##rangle +biologist +orphans +baroness +oyster +stepfather +##dridge +mirage +reefs +speeding +##31 +barons +1764 +227 +inhabit +preached +repealed +##tral +honoring +boogie +captives +administer +johanna +##imate +gel +suspiciously +1767 +sobs +##dington +backbone +hayward +garry +##folding +##nesia +maxi +##oof +##ppe +ellison +galileo +##stand +crimea +frenzy +amour +bumper +matrices +natalia +baking +garth +palestinians +##grove +smack +conveyed +ensembles +gardening +##manship +##rup +##stituting +1640 +harvesting +topography +jing +shifters +dormitory +##carriage +##lston +ist +skulls +##stadt +dolores +jewellery +sarawak +##wai +##zier +fences +christy +confinement +tumbling +credibility +fir +stench +##bria +##plication +##nged +##sam +virtues +##belt +marjorie +pba +##eem +##made +celebrates +schooner +agitated +barley +fulfilling +anthropologist +##pro +restrict +novi +regulating +##nent +padres +##rani +##hesive +loyola +tabitha +milky +olson +proprietor +crambidae +guarantees +intercollegiate +ljubljana +hilda +##sko +ignorant +hooded +##lts +sardinia +##lidae +##vation +frontman +privileged +witchcraft +##gp +jammed +laude +poking +##than +bracket +amazement +yunnan +##erus +maharaja +linnaeus +264 +commissioning +milano +peacefully +##logies +akira +rani +regulator +##36 +grasses +##rance +luzon +crows +compiler +gretchen +seaman +edouard +tab +buccaneers +ellington +hamlets +whig +socialists +##anto +directorial +easton +mythological +##kr +##vary +rhineland +semantic +taut +dune +inventions +succeeds +##iter +replication +branched +##pired +jul +prosecuted +kangaroo +penetrated +##avian +middlesbrough +doses +bleak +madam +predatory +relentless +##vili +reluctance +##vir +hailey +crore +silvery +1759 +monstrous +swimmers +transmissions +hawthorn +informing +##eral +toilets +caracas +crouch +kb +##sett +295 +cartel +hadley +##aling +alexia +yvonne +##biology +cinderella +eton +superb +blizzard +stabbing +industrialist +maximus +##gm +##orus +groves +maud +clade +oversized +comedic +##bella +rosen +nomadic +fulham +montane +beverages +galaxies +redundant +swarm +##rot +##folia +##llis +buckinghamshire +fen +bearings +bahadur +##rom +gilles +phased +dynamite +faber +benoit +vip +##ount +##wd +booking +fractured +tailored +anya +spices +westwood +cairns +auditions +inflammation +steamed +##rocity +##acion +##urne +skyla +thereof +watford +torment +archdeacon +transforms +lulu +demeanor +fucked +serge +##sor +mckenna +minas +entertainer +##icide +caress +originate +residue +##sty +1740 +##ilised +##org +beech +##wana +subsidies +##ghton +emptied +gladstone +ru +firefighters +voodoo +##rcle +het +nightingale +tamara +edmond +ingredient +weaknesses +silhouette +285 +compatibility +withdrawing +hampson +##mona +anguish +giggling +##mber +bookstore +##jiang +southernmost +tilting +##vance +bai +economical +rf +briefcase +dreadful +hinted +projections +shattering +totaling +##rogate +analogue +indicted +periodical +fullback +##dman +haynes +##tenberg +##ffs +##ishment +1745 +thirst +stumble +penang +vigorous +##ddling +##kor +##lium +octave +##ove +##enstein +##inen +##ones +siberian +##uti +cbn +repeal +swaying +##vington +khalid +tanaka +unicorn +otago +plastered +lobe +riddle +##rella +perch +##ishing +croydon +filtered +graeme +tripoli +##ossa +crocodile 
+##chers +sufi +mined +##tung +inferno +lsu +##phi +swelled +utilizes +£2 +cale +periodicals +styx +hike +informally +coop +lund +##tidae +ala +hen +qui +transformations +disposed +sheath +chickens +##cade +fitzroy +sas +silesia +unacceptable +odisha +1650 +sabrina +pe +spokane +ratios +athena +massage +shen +dilemma +##drum +##riz +##hul +corona +doubtful +niall +##pha +##bino +fines +cite +acknowledging +bangor +ballard +bathurst +##resh +huron +mustered +alzheimer +garments +kinase +tyre +warship +##cp +flashback +pulmonary +braun +cheat +kamal +cyclists +constructions +grenades +ndp +traveller +excuses +stomped +signalling +trimmed +futsal +mosques +relevance +##wine +wta +##23 +##vah +##lter +hoc +##riding +optimistic +##´s +deco +sim +interacting +rejecting +moniker +waterways +##ieri +##oku +mayors +gdansk +outnumbered +pearls +##ended +##hampton +fairs +totals +dominating +262 +notions +stairway +compiling +pursed +commodities +grease +yeast +##jong +carthage +griffiths +residual +amc +contraction +laird +sapphire +##marine +##ivated +amalgamation +dissolve +inclination +lyle +packaged +altitudes +suez +canons +graded +lurched +narrowing +boasts +guise +wed +enrico +##ovsky +rower +scarred +bree +cub +iberian +protagonists +bargaining +proposing +trainers +voyages +vans +fishes +##aea +##ivist +##verance +encryption +artworks +kazan +sabre +cleopatra +hepburn +rotting +supremacy +mecklenburg +##brate +burrows +hazards +outgoing +flair +organizes +##ctions +scorpion +##usions +boo +234 +chevalier +dunedin +slapping +##34 +ineligible +pensions +##38 +##omic +manufactures +emails +bismarck +238 +weakening +blackish +ding +mcgee +quo +##rling +northernmost +xx +manpower +greed +sampson +clicking +##ange +##horpe +##inations +##roving +torre +##eptive +##moral +symbolism +38th +asshole +meritorious +outfits +splashed +biographies +sprung +astros +##tale +302 +737 +filly +raoul +nw +tokugawa +linden +clubhouse +##apa +tracts +romano +##pio +putin +tags +##note +chained +dickson +gunshot +moe +gunn +rashid +##tails +zipper +##bas +##nea +contrasted +##ply +##udes +plum +pharaoh +##pile +aw +comedies +ingrid +sandwiches +subdivisions +1100 +mariana +nokia +kamen +hz +delaney +veto +herring +##words +possessive +outlines +##roup +siemens +stairwell +rc +gallantry +messiah +palais +yells +233 +zeppelin +##dm +bolivar +##cede +smackdown +mckinley +##mora +##yt +muted +geologic +finely +unitary +avatar +hamas +maynard +rees +bog +contrasting +##rut +liv +chico +disposition +pixel +##erate +becca +dmitry +yeshiva +narratives +##lva +##ulton +mercenary +sharpe +tempered +navigate +stealth +amassed +keynes +##lini +untouched +##rrie +havoc +lithium +##fighting +abyss +graf +southward +wolverine +balloons +implements +ngos +transitions +##icum +ambushed +concacaf +dormant +economists +##dim +costing +csi +rana +universite +boulders +verity +##llon +collin +mellon +misses +cypress +fluorescent +lifeless +spence +##ulla +crewe +shepard +pak +revelations +##م +jolly +gibbons +paw +##dro +##quel +freeing +##test +shack +fries +palatine +##51 +##hiko +accompaniment +cruising +recycled +##aver +erwin +sorting +synthesizers +dyke +realities +sg +strides +enslaved +wetland +##ghan +competence +gunpowder +grassy +maroon +reactors +objection +##oms +carlson +gearbox +macintosh +radios +shelton +##sho +clergyman +prakash +254 +mongols +trophies +oricon +228 +stimuli +twenty20 +cantonese +cortes +mirrored +##saurus +bhp +cristina +melancholy +##lating +enjoyable +nuevo +##wny +downfall +schumacher +##ind 
+banging +lausanne +rumbled +paramilitary +reflex +ax +amplitude +migratory +##gall +##ups +midi +barnard +lastly +sherry +##hp +##nall +keystone +##kra +carleton +slippery +##53 +coloring +foe +socket +otter +##rgos +mats +##tose +consultants +bafta +bison +topping +##km +490 +primal +abandonment +transplant +atoll +hideous +mort +pained +reproduced +tae +howling +##turn +unlawful +billionaire +hotter +poised +lansing +##chang +dinamo +retro +messing +nfc +domesday +##mina +blitz +timed +##athing +##kley +ascending +gesturing +##izations +signaled +tis +chinatown +mermaid +savanna +jameson +##aint +catalina +##pet +##hers +cochrane +cy +chatting +##kus +alerted +computation +mused +noelle +majestic +mohawk +campo +octagonal +##sant +##hend +241 +aspiring +##mart +comprehend +iona +paralyzed +shimmering +swindon +rhone +##eley +reputed +configurations +pitchfork +agitation +francais +gillian +lipstick +##ilo +outsiders +pontifical +resisting +bitterness +sewer +rockies +##edd +##ucher +misleading +1756 +exiting +galloway +##nging +risked +##heart +246 +commemoration +schultz +##rka +integrating +##rsa +poses +shrieked +##weiler +guineas +gladys +jerking +owls +goldsmith +nightly +penetrating +##unced +lia +##33 +ignited +betsy +##aring +##thorpe +follower +vigorously +##rave +coded +kiran +knit +zoology +tbilisi +##28 +##bered +repository +govt +deciduous +dino +growling +##bba +enhancement +unleashed +chanting +pussy +biochemistry +##eric +kettle +repression +toxicity +nrhp +##arth +##kko +##bush +ernesto +commended +outspoken +242 +mca +parchment +sms +kristen +##aton +bisexual +raked +glamour +navajo +a2 +conditioned +showcased +##hma +spacious +youthful +##esa +usl +appliances +junta +brest +layne +conglomerate +enchanted +chao +loosened +picasso +circulating +inspect +montevideo +##centric +##kti +piazza +spurred +##aith +bari +freedoms +poultry +stamford +lieu +##ect +indigo +sarcastic +bahia +stump +attach +dvds +frankenstein +lille +approx +scriptures +pollen +##script +nmi +overseen +##ivism +tides +proponent +newmarket +inherit +milling +##erland +centralized +##rou +distributors +credentials +drawers +abbreviation +##lco +##xon +downing +uncomfortably +ripe +##oes +erase +franchises +##ever +populace +##bery +##khar +decomposition +pleas +##tet +daryl +sabah +##stle +##wide +fearless +genie +lesions +annette +##ogist +oboe +appendix +nair +dripped +petitioned +maclean +mosquito +parrot +rpg +hampered +1648 +operatic +reservoirs +##tham +irrelevant +jolt +summarized +##fp +medallion +##taff +##− +clawed +harlow +narrower +goddard +marcia +bodied +fremont +suarez +altering +tempest +mussolini +porn +##isms +sweetly +oversees +walkers +solitude +grimly +shrines +hk +ich +supervisors +hostess +dietrich +legitimacy +brushes +expressive +##yp +dissipated +##rse +localized +systemic +##nikov +gettysburg +##js +##uaries +dialogues +muttering +251 +housekeeper +sicilian +discouraged +##frey +beamed +kaladin +halftime +kidnap +##amo +##llet +1754 +synonymous +depleted +instituto +insulin +reprised +##opsis +clashed +##ctric +interrupting +radcliffe +insisting +medici +1715 +ejected +playfully +turbulent +##47 +starvation +##rini +shipment +rebellious +petersen +verification +merits +##rified +cakes +##charged +1757 +milford +shortages +spying +fidelity +##aker +emitted +storylines +harvested +seismic +##iform +cheung +kilda +theoretically +barbie +lynx +##rgy +##tius +goblin +mata +poisonous +##nburg +reactive +residues +obedience +##евич +conjecture +##rac +401 +hating +sixties +kicker 
+moaning +motown +##bha +emancipation +neoclassical +##hering +consoles +ebert +professorship +##tures +sustaining +assaults +obeyed +affluent +incurred +tornadoes +##eber +##zow +emphasizing +highlanders +cheated +helmets +##ctus +internship +terence +bony +executions +legislators +berries +peninsular +tinged +##aco +1689 +amplifier +corvette +ribbons +lavish +pennant +##lander +worthless +##chfield +##forms +mariano +pyrenees +expenditures +##icides +chesterfield +mandir +tailor +39th +sergey +nestled +willed +aristocracy +devotees +goodnight +raaf +rumored +weaponry +remy +appropriations +harcourt +burr +riaa +##lence +limitation +unnoticed +guo +soaking +swamps +##tica +collapsing +tatiana +descriptive +brigham +psalm +##chment +maddox +##lization +patti +caliph +##aja +akron +injuring +serra +##ganj +basins +##sari +astonished +launcher +##church +hilary +wilkins +sewing +##sf +stinging +##fia +##ncia +underwood +startup +##ition +compilations +vibrations +embankment +jurist +##nity +bard +juventus +groundwater +kern +palaces +helium +boca +cramped +marissa +soto +##worm +jae +princely +##ggy +faso +bazaar +warmly +##voking +229 +pairing +##lite +##grate +##nets +wien +freaked +ulysses +rebirth +##alia +##rent +mummy +guzman +jimenez +stilled +##nitz +trajectory +tha +woken +archival +professions +##pts +##pta +hilly +shadowy +shrink +##bolt +norwood +glued +migrate +stereotypes +devoid +##pheus +625 +evacuate +horrors +infancy +gotham +knowles +optic +downloaded +sachs +kingsley +parramatta +darryl +mor +##onale +shady +commence +confesses +kan +##meter +##placed +marlborough +roundabout +regents +frigates +io +##imating +gothenburg +revoked +carvings +clockwise +convertible +intruder +##sche +banged +##ogo +vicky +bourgeois +##mony +dupont +footing +##gum +pd +##real +buckle +yun +penthouse +sane +720 +serviced +stakeholders +neumann +bb +##eers +comb +##gam +catchment +pinning +rallies +typing +##elles +forefront +freiburg +sweetie +giacomo +widowed +goodwill +worshipped +aspirations +midday +##vat +fishery +##trick +bournemouth +turk +243 +hearth +ethanol +guadalajara +murmurs +sl +##uge +afforded +scripted +##hta +wah +##jn +coroner +translucent +252 +memorials +puck +progresses +clumsy +##race +315 +candace +recounted +##27 +##slin +##uve +filtering +##mac +howl +strata +heron +leveled +##ays +dubious +##oja +##т +##wheel +citations +exhibiting +##laya +##mics +##pods +turkic +##lberg +injunction +##ennial +##mit +antibodies +##44 +organise +##rigues +cardiovascular +cushion +inverness +##zquez +dia +cocoa +sibling +##tman +##roid +expanse +feasible +tunisian +algiers +##relli +rus +bloomberg +dso +westphalia +bro +tacoma +281 +downloads +##ours +konrad +duran +##hdi +continuum +jett +compares +legislator +secession +##nable +##gues +##zuka +translating +reacher +##gley +##ła +aleppo +##agi +tc +orchards +trapping +linguist +versatile +drumming +postage +calhoun +superiors +##mx +barefoot +leary +##cis +ignacio +alfa +kaplan +##rogen +bratislava +mori +##vot +disturb +haas +313 +cartridges +gilmore +radiated +salford +tunic +hades +##ulsive +archeological +delilah +magistrates +auditioned +brewster +charters +empowerment +blogs +cappella +dynasties +iroquois +whipping +##krishna +raceway +truths +myra +weaken +judah +mcgregor +##horse +mic +refueling +37th +burnley +bosses +markus +premio +query +##gga +dunbar +##economic +darkest +lyndon +sealing +commendation +reappeared +##mun +addicted +ezio +slaughtered +satisfactory +shuffle +##eves +##thic +##uj +fortification +warrington 
+##otto +resurrected +fargo +mane +##utable +##lei +##space +foreword +ox +##aris +##vern +abrams +hua +##mento +sakura +##alo +uv +sentimental +##skaya +midfield +##eses +sturdy +scrolls +macleod +##kyu +entropy +##lance +mitochondrial +cicero +excelled +thinner +convoys +perceive +##oslav +##urable +systematically +grind +burkina +287 +##tagram +ops +##aman +guantanamo +##cloth +##tite +forcefully +wavy +##jou +pointless +##linger +##tze +layton +portico +superficial +clerical +outlaws +##hism +burials +muir +##inn +creditors +hauling +rattle +##leg +calais +monde +archers +reclaimed +dwell +wexford +hellenic +falsely +remorse +##tek +dough +furnishings +##uttered +gabon +neurological +novice +##igraphy +contemplated +pulpit +nightstand +saratoga +##istan +documenting +pulsing +taluk +##firmed +busted +marital +##rien +disagreements +wasps +##yes +hodge +mcdonnell +mimic +fran +pendant +dhabi +musa +##nington +congratulations +argent +darrell +concussion +losers +regrets +thessaloniki +reversal +donaldson +hardwood +thence +achilles +ritter +##eran +demonic +jurgen +prophets +goethe +eki +classmate +buff +##cking +yank +irrational +##inging +perished +seductive +qur +sourced +##crat +##typic +mustard +ravine +barre +horizontally +characterization +phylogenetic +boise +##dit +##runner +##tower +brutally +intercourse +seduce +##bbing +fay +ferris +ogden +amar +nik +unarmed +##inator +evaluating +kyrgyzstan +sweetness +##lford +##oki +mccormick +meiji +notoriety +stimulate +disrupt +figuring +instructional +mcgrath +##zoo +groundbreaking +##lto +flinch +khorasan +agrarian +bengals +mixer +radiating +##sov +ingram +pitchers +nad +tariff +##cript +tata +##codes +##emi +##ungen +appellate +lehigh +##bled +##giri +brawl +duct +texans +##ciation +##ropolis +skipper +speculative +vomit +doctrines +stresses +253 +davy +graders +whitehead +jozef +timely +cumulative +haryana +paints +appropriately +boon +cactus +##ales +##pid +dow +legions +##pit +perceptions +1730 +picturesque +##yse +periphery +rune +wr +##aha +celtics +sentencing +whoa +##erin +confirms +variance +425 +moines +mathews +spade +rave +m1 +fronted +fx +blending +alleging +reared +##gl +237 +##paper +grassroots +eroded +##free +##physical +directs +ordeal +##sław +accelerate +hacker +rooftop +##inia +lev +buys +cebu +devote +##lce +specialising +##ulsion +choreographed +repetition +warehouses +##ryl +paisley +tuscany +analogy +sorcerer +hash +huts +shards +descends +exclude +nix +chaplin +gaga +ito +vane +##drich +causeway +misconduct +limo +orchestrated +glands +jana +##kot +u2 +##mple +##sons +branching +contrasts +scoop +longed +##virus +chattanooga +##75 +syrup +cornerstone +##tized +##mind +##iaceae +careless +precedence +frescoes +##uet +chilled +consult +modelled +snatch +peat +##thermal +caucasian +humane +relaxation +spins +temperance +##lbert +occupations +lambda +hybrids +moons +mp3 +##oese +247 +rolf +societal +yerevan +ness +##ssler +befriended +mechanized +nominate +trough +boasted +cues +seater +##hom +bends +##tangle +conductors +emptiness +##lmer +eurasian +adriatic +tian +##cie +anxiously +lark +propellers +chichester +jock +ev +2a +##holding +credible +recounts +tori +loyalist +abduction +##hoot +##redo +nepali +##mite +ventral +tempting +##ango +##crats +steered +##wice +javelin +dipping +laborers +prentice +looming +titanium +##ː +badges +emir +tensor +##ntation +egyptians +rash +denies +hawthorne +lombard +showers +wehrmacht +dietary +trojan +##reus +welles +executing +horseshoe +lifeboat +##lak +elsa +infirmary 
+nearing +roberta +boyer +mutter +trillion +joanne +##fine +##oked +sinks +vortex +uruguayan +clasp +sirius +##block +accelerator +prohibit +sunken +byu +chronological +diplomats +ochreous +510 +symmetrical +1644 +maia +##tology +salts +reigns +atrocities +##ия +hess +bared +issn +##vyn +cater +saturated +##cycle +##isse +sable +voyager +dyer +yusuf +##inge +fountains +wolff +##39 +##nni +engraving +rollins +atheist +ominous +##ault +herr +chariot +martina +strung +##fell +##farlane +horrific +sahib +gazes +saetan +erased +ptolemy +##olic +flushing +lauderdale +analytic +##ices +530 +navarro +beak +gorilla +herrera +broom +guadalupe +raiding +sykes +311 +bsc +deliveries +1720 +invasions +carmichael +tajikistan +thematic +ecumenical +sentiments +onstage +##rians +##brand +##sume +catastrophic +flanks +molten +##arns +waller +aimee +terminating +##icing +alternately +##oche +nehru +printers +outraged +##eving +empires +template +banners +repetitive +za +##oise +vegetarian +##tell +guiana +opt +cavendish +lucknow +synthesized +##hani +##mada +finalized +##ctable +fictitious +mayoral +unreliable +##enham +embracing +peppers +rbis +##chio +##neo +inhibition +slashed +togo +orderly +embroidered +safari +salty +236 +barron +benito +totaled +##dak +pubs +simulated +caden +devin +tolkien +momma +welding +sesame +##ept +gottingen +hardness +630 +shaman +temeraire +620 +adequately +pediatric +##kit +ck +assertion +radicals +composure +cadence +seafood +beaufort +lazarus +mani +warily +cunning +kurdistan +249 +cantata +##kir +ares +##41 +##clusive +nape +townland +geared +insulted +flutter +boating +violate +draper +dumping +malmo +##hh +##romatic +firearm +alta +bono +obscured +##clave +exceeds +panorama +unbelievable +##train +preschool +##essed +disconnected +installing +rescuing +secretaries +accessibility +##castle +##drive +##ifice +##film +bouts +slug +waterway +mindanao +##buro +##ratic +halves +##ل +calming +liter +maternity +adorable +bragg +electrification +mcc +##dote +roxy +schizophrenia +##body +munoz +kaye +whaling +239 +mil +tingling +tolerant +##ago +unconventional +volcanoes +##finder +deportivo +##llie +robson +kaufman +neuroscience +wai +deportation +masovian +scraping +converse +##bh +hacking +bulge +##oun +administratively +yao +580 +amp +mammoth +booster +claremont +hooper +nomenclature +pursuits +mclaughlin +melinda +##sul +catfish +barclay +substrates +taxa +zee +originals +kimberly +packets +padma +##ality +borrowing +ostensibly +solvent +##bri +##genesis +##mist +lukas +shreveport +veracruz +##ь +##lou +##wives +cheney +tt +anatolia +hobbs +##zyn +cyclic +radiant +alistair +greenish +siena +dat +independents +##bation +conform +pieter +hyper +applicant +bradshaw +spores +telangana +vinci +inexpensive +nuclei +322 +jang +nme +soho +spd +##ign +cradled +receptionist +pow +##43 +##rika +fascism +##ifer +experimenting +##ading +##iec +##region +345 +jocelyn +maris +stair +nocturnal +toro +constabulary +elgin +##kker +msc +##giving +##schen +##rase +doherty +doping +sarcastically +batter +maneuvers +##cano +##apple +##gai +##git +intrinsic +##nst +##stor +1753 +showtime +cafes +gasps +lviv +ushered +##thed +fours +restart +astonishment +transmitting +flyer +shrugs +##sau +intriguing +cones +dictated +mushrooms +medial +##kovsky +##elman +escorting +gaped +##26 +godfather +##door +##sell +djs +recaptured +timetable +vila +1710 +3a +aerodrome +mortals +scientology +##orne +angelina +mag +convection +unpaid +insertion +intermittent +lego +##nated +endeavor +kota +pereira +##lz +304 
+bwv +glamorgan +insults +agatha +fey +##cend +fleetwood +mahogany +protruding +steamship +zeta +##arty +mcguire +suspense +##sphere +advising +urges +##wala +hurriedly +meteor +gilded +inline +arroyo +stalker +##oge +excitedly +revered +##cure +earle +introductory +##break +##ilde +mutants +puff +pulses +reinforcement +##haling +curses +lizards +stalk +correlated +##fixed +fallout +macquarie +##unas +bearded +denton +heaving +802 +##ocation +winery +assign +dortmund +##lkirk +everest +invariant +charismatic +susie +##elling +bled +lesley +telegram +sumner +bk +##ogen +##к +wilcox +needy +colbert +duval +##iferous +##mbled +allotted +attends +imperative +##hita +replacements +hawker +##inda +insurgency +##zee +##eke +casts +##yla +680 +ives +transitioned +##pack +##powering +authoritative +baylor +flex +cringed +plaintiffs +woodrow +##skie +drastic +ape +aroma +unfolded +commotion +nt +preoccupied +theta +routines +lasers +privatization +wand +domino +ek +clenching +nsa +strategically +showered +bile +handkerchief +pere +storing +christophe +insulting +316 +nakamura +romani +asiatic +magdalena +palma +cruises +stripping +405 +konstantin +soaring +##berman +colloquially +forerunner +havilland +incarcerated +parasites +sincerity +##utus +disks +plank +saigon +##ining +corbin +homo +ornaments +powerhouse +##tlement +chong +fastened +feasibility +idf +morphological +usable +##nish +##zuki +aqueduct +jaguars +keepers +##flies +aleksandr +faust +assigns +ewing +bacterium +hurled +tricky +hungarians +integers +wallis +321 +yamaha +##isha +hushed +oblivion +aviator +evangelist +friars +##eller +monograph +ode +##nary +airplanes +labourers +charms +##nee +1661 +hagen +tnt +rudder +fiesta +transcript +dorothea +ska +inhibitor +maccabi +retorted +raining +encompassed +clauses +menacing +1642 +lineman +##gist +vamps +##ape +##dick +gloom +##rera +dealings +easing +seekers +##nut +##pment +helens +unmanned +##anu +##isson +basics +##amy +##ckman +adjustments +1688 +brutality +horne +##zell +sui +##55 +##mable +aggregator +##thal +rhino +##drick +##vira +counters +zoom +##01 +##rting +mn +montenegrin +packard +##unciation +##♭ +##kki +reclaim +scholastic +thugs +pulsed +##icia +syriac +quan +saddam +banda +kobe +blaming +buddies +dissent +##lusion +##usia +corbett +jaya +delle +erratic +lexie +##hesis +435 +amiga +hermes +##pressing +##leen +chapels +gospels +jamal +##uating +compute +revolving +warp +##sso +##thes +armory +##eras +##gol +antrim +loki +##kow +##asian +##good +##zano +braid +handwriting +subdistrict +funky +pantheon +##iculate +concurrency +estimation +improper +juliana +##his +newcomers +johnstone +staten +communicated +##oco +##alle +sausage +stormy +##stered +##tters +superfamily +##grade +acidic +collateral +tabloid +##oped +##rza +bladder +austen +##ellant +mcgraw +##hay +hannibal +mein +aquino +lucifer +wo +badger +boar +cher +christensen +greenberg +interruption +##kken +jem +244 +mocked +bottoms +cambridgeshire +##lide +sprawling +##bbly +eastwood +ghent +synth +##buck +advisers +##bah +nominally +hapoel +qu +daggers +estranged +fabricated +towels +vinnie +wcw +misunderstanding +anglia +nothin +unmistakable +##dust +##lova +chilly +marquette +truss +##edge +##erine +reece +##lty +##chemist +##connected +272 +308 +41st +bash +raion +waterfalls +##ump +##main +labyrinth +queue +theorist +##istle +bharatiya +flexed +soundtracks +rooney +leftist +patrolling +wharton +plainly +alleviate +eastman +schuster +topographic +engages +immensely +unbearable +fairchild +1620 +dona +lurking 
+parisian +oliveira +ia +indictment +hahn +bangladeshi +##aster +vivo +##uming +##ential +antonia +expects +indoors +kildare +harlan +##logue +##ogenic +##sities +forgiven +##wat +childish +tavi +##mide +##orra +plausible +grimm +successively +scooted +##bola +##dget +##rith +spartans +emery +flatly +azure +epilogue +##wark +flourish +##iny +##tracted +##overs +##oshi +bestseller +distressed +receipt +spitting +hermit +topological +##cot +drilled +subunit +francs +##layer +eel +##fk +##itas +octopus +footprint +petitions +ufo +##say +##foil +interfering +leaking +palo +##metry +thistle +valiant +##pic +narayan +mcpherson +##fast +gonzales +##ym +##enne +dustin +novgorod +solos +##zman +doin +##raph +##patient +##meyer +soluble +ashland +cuffs +carole +pendleton +whistling +vassal +##river +deviation +revisited +constituents +rallied +rotate +loomed +##eil +##nting +amateurs +augsburg +auschwitz +crowns +skeletons +##cona +bonnet +257 +dummy +globalization +simeon +sleeper +mandal +differentiated +##crow +##mare +milne +bundled +exasperated +talmud +owes +segregated +##feng +##uary +dentist +piracy +props +##rang +devlin +##torium +malicious +paws +##laid +dependency +##ergy +##fers +##enna +258 +pistons +rourke +jed +grammatical +tres +maha +wig +512 +ghostly +jayne +##achal +##creen +##ilis +##lins +##rence +designate +##with +arrogance +cambodian +clones +showdown +throttle +twain +##ception +lobes +metz +nagoya +335 +braking +##furt +385 +roaming +##minster +amin +crippled +##37 +##llary +indifferent +hoffmann +idols +intimidating +1751 +261 +influenza +memo +onions +1748 +bandage +consciously +##landa +##rage +clandestine +observes +swiped +tangle +##ener +##jected +##trum +##bill +##lta +hugs +congresses +josiah +spirited +##dek +humanist +managerial +filmmaking +inmate +rhymes +debuting +grimsby +ur +##laze +duplicate +vigor +##tf +republished +bolshevik +refurbishment +antibiotics +martini +methane +newscasts +royale +horizons +levant +iain +visas +##ischen +paler +##around +manifestation +snuck +alf +chop +futile +pedestal +rehab +##kat +bmg +kerman +res +fairbanks +jarrett +abstraction +saharan +##zek +1746 +procedural +clearer +kincaid +sash +luciano +##ffey +crunch +helmut +##vara +revolutionaries +##tute +creamy +leach +##mmon +1747 +permitting +nes +plight +wendell +##lese +contra +ts +clancy +ipa +mach +staples +autopsy +disturbances +nueva +karin +pontiac +##uding +proxy +venerable +haunt +leto +bergman +expands +##helm +wal +##pipe +canning +celine +cords +obesity +##enary +intrusion +planner +##phate +reasoned +sequencing +307 +harrow +##chon +##dora +marred +mcintyre +repay +tarzan +darting +248 +harrisburg +margarita +repulsed +##hur +##lding +belinda +hamburger +novo +compliant +runways +bingham +registrar +skyscraper +ic +cuthbert +improvisation +livelihood +##corp +##elial +admiring +##dened +sporadic +believer +casablanca +popcorn +##29 +asha +shovel +##bek +##dice +coiled +tangible +##dez +casper +elsie +resin +tenderness +rectory +##ivision +avail +sonar +##mori +boutique +##dier +guerre +bathed +upbringing +vaulted +sandals +blessings +##naut +##utnant +1680 +306 +foxes +pia +corrosion +hesitantly +confederates +crystalline +footprints +shapiro +tirana +valentin +drones +45th +microscope +shipments +texted +inquisition +wry +guernsey +unauthorized +resigning +760 +ripple +schubert +stu +reassure +felony +##ardo +brittle +koreans +##havan +##ives +dun +implicit +tyres +##aldi +##lth +magnolia +##ehan +##puri +##poulos +aggressively +fei +gr +familiarity +##poo 
+indicative +##trust +fundamentally +jimmie +overrun +395 +anchors +moans +##opus +britannia +armagh +##ggle +purposely +seizing +##vao +bewildered +mundane +avoidance +cosmopolitan +geometridae +quartermaster +caf +415 +chatter +engulfed +gleam +purge +##icate +juliette +jurisprudence +guerra +revisions +##bn +casimir +brew +##jm +1749 +clapton +cloudy +conde +hermitage +278 +simulations +torches +vincenzo +matteo +##rill +hidalgo +booming +westbound +accomplishment +tentacles +unaffected +##sius +annabelle +flopped +sloping +##litz +dreamer +interceptor +vu +##loh +consecration +copying +messaging +breaker +climates +hospitalized +1752 +torino +afternoons +winfield +witnessing +##teacher +breakers +choirs +sawmill +coldly +##ege +sipping +haste +uninhabited +conical +bibliography +pamphlets +severn +edict +##oca +deux +illnesses +grips +##pl +rehearsals +sis +thinkers +tame +##keepers +1690 +acacia +reformer +##osed +##rys +shuffling +##iring +##shima +eastbound +ionic +rhea +flees +littered +##oum +rocker +vomiting +groaning +champ +overwhelmingly +civilizations +paces +sloop +adoptive +##tish +skaters +##vres +aiding +mango +##joy +nikola +shriek +##ignon +pharmaceuticals +##mg +tuna +calvert +gustavo +stocked +yearbook +##urai +##mana +computed +subsp +riff +hanoi +kelvin +hamid +moors +pastures +summons +jihad +nectar +##ctors +bayou +untitled +pleasing +vastly +republics +intellect +##η +##ulio +##tou +crumbling +stylistic +sb +##ی +consolation +frequented +h₂o +walden +widows +##iens +404 +##ignment +chunks +improves +288 +grit +recited +##dev +snarl +sociological +##arte +##gul +inquired +##held +bruise +clube +consultancy +homogeneous +hornets +multiplication +pasta +prick +savior +##grin +##kou +##phile +yoon +##gara +grimes +vanishing +cheering +reacting +bn +distillery +##quisite +##vity +coe +dockyard +massif +##jord +escorts +voss +##valent +byte +chopped +hawke +illusions +workings +floats +##koto +##vac +kv +annapolis +madden +##onus +alvaro +noctuidae +##cum +##scopic +avenge +steamboat +forte +illustrates +erika +##trip +570 +dew +nationalities +bran +manifested +thirsty +diversified +muscled +reborn +##standing +arson +##lessness +##dran +##logram +##boys +##kushima +##vious +willoughby +##phobia +286 +alsace +dashboard +yuki +##chai +granville +myspace +publicized +tricked +##gang +adjective +##ater +relic +reorganisation +enthusiastically +indications +saxe +##lassified +consolidate +iec +padua +helplessly +ramps +renaming +regulars +pedestrians +accents +convicts +inaccurate +lowers +mana +##pati +barrie +bjp +outta +someplace +berwick +flanking +invoked +marrow +sparsely +excerpts +clothed +rei +##ginal +wept +##straße +##vish +alexa +excel +##ptive +membranes +aquitaine +creeks +cutler +sheppard +implementations +ns +##dur +fragrance +budge +concordia +magnesium +marcelo +##antes +gladly +vibrating +##rral +##ggles +montrose +##omba +lew +seamus +1630 +cocky +##ament +##uen +bjorn +##rrick +fielder +fluttering +##lase +methyl +kimberley +mcdowell +reductions +barbed +##jic +##tonic +aeronautical +condensed +distracting +##promising +huffed +##cala +##sle +claudius +invincible +missy +pious +balthazar +ci +##lang +butte +combo +orson +##dication +myriad +1707 +silenced +##fed +##rh +coco +netball +yourselves +##oza +clarify +heller +peg +durban +etudes +offender +roast +blackmail +curvature +##woods +vile +309 +illicit +suriname +##linson +overture +1685 +bubbling +gymnast +tucking +##mming +##ouin +maldives +##bala +gurney +##dda +##eased +##oides +backside +pinto 
+jars +racehorse +tending +##rdial +baronetcy +wiener +duly +##rke +barbarian +cupping +flawed +##thesis +bertha +pleistocene +puddle +swearing +##nob +##tically +fleeting +prostate +amulet +educating +##mined +##iti +##tler +75th +jens +respondents +analytics +cavaliers +papacy +raju +##iente +##ulum +##tip +funnel +271 +disneyland +##lley +sociologist +##iam +2500 +faulkner +louvre +menon +##dson +276 +##ower +afterlife +mannheim +peptide +referees +comedians +meaningless +##anger +##laise +fabrics +hurley +renal +sleeps +##bour +##icle +breakout +kristin +roadside +animator +clover +disdain +unsafe +redesign +##urity +firth +barnsley +portage +reset +narrows +268 +commandos +expansive +speechless +tubular +##lux +essendon +eyelashes +smashwords +##yad +##bang +##claim +craved +sprinted +chet +somme +astor +wrocław +orton +266 +bane +##erving +##uing +mischief +##amps +##sund +scaling +terre +##xious +impairment +offenses +undermine +moi +soy +contiguous +arcadia +inuit +seam +##tops +macbeth +rebelled +##icative +##iot +590 +elaborated +frs +uniformed +##dberg +259 +powerless +priscilla +stimulated +980 +qc +arboretum +frustrating +trieste +bullock +##nified +enriched +glistening +intern +##adia +locus +nouvelle +ollie +ike +lash +starboard +ee +tapestry +headlined +hove +rigged +##vite +pollock +##yme +thrive +clustered +cas +roi +gleamed +olympiad +##lino +pressured +regimes +##hosis +##lick +ripley +##ophone +kickoff +gallon +rockwell +##arable +crusader +glue +revolutions +scrambling +1714 +grover +##jure +englishman +aztec +263 +contemplating +coven +ipad +preach +triumphant +tufts +##esian +rotational +##phus +328 +falkland +##brates +strewn +clarissa +rejoin +environmentally +glint +banded +drenched +moat +albanians +johor +rr +maestro +malley +nouveau +shaded +taxonomy +v6 +adhere +bunk +airfields +##ritan +1741 +encompass +remington +tran +##erative +amelie +mazda +friar +morals +passions +##zai +breadth +vis +##hae +argus +burnham +caressing +insider +rudd +##imov +##mini +##rso +italianate +murderous +textual +wainwright +armada +bam +weave +timer +##taken +##nh +fra +##crest +ardent +salazar +taps +tunis +##ntino +allegro +gland +philanthropic +##chester +implication +##optera +esq +judas +noticeably +wynn +##dara +inched +indexed +crises +villiers +bandit +royalties +patterned +cupboard +interspersed +accessory +isla +kendrick +entourage +stitches +##esthesia +headwaters +##ior +interlude +distraught +draught +1727 +##basket +biased +sy +transient +triad +subgenus +adapting +kidd +shortstop +##umatic +dimly +spiked +mcleod +reprint +nellie +pretoria +windmill +##cek +singled +##mps +273 +reunite +##orous +747 +bankers +outlying +##omp +##ports +##tream +apologies +cosmetics +patsy +##deh +##ocks +##yson +bender +nantes +serene +##nad +lucha +mmm +323 +##cius +##gli +cmll +coinage +nestor +juarez +##rook +smeared +sprayed +twitching +sterile +irina +embodied +juveniles +enveloped +miscellaneous +cancers +dq +gulped +luisa +crested +swat +donegal +ref +##anov +##acker +hearst +mercantile +##lika +doorbell +ua +vicki +##alla +##som +bilbao +psychologists +stryker +sw +horsemen +turkmenistan +wits +##national +anson +mathew +screenings +##umb +rihanna +##agne +##nessy +aisles +##iani +##osphere +hines +kenton +saskatoon +tasha +truncated +##champ +##itan +mildred +advises +fredrik +interpreting +inhibitors +##athi +spectroscopy +##hab +##kong +karim +panda +##oia +##nail +##vc +conqueror +kgb +leukemia +##dity +arrivals +cheered +pisa +phosphorus +shielded +##riated +mammal 
+unitarian +urgently +chopin +sanitary +##mission +spicy +drugged +hinges +##tort +tipping +trier +impoverished +westchester +##caster +267 +epoch +nonstop +##gman +##khov +aromatic +centrally +cerro +##tively +##vio +billions +modulation +sedimentary +283 +facilitating +outrageous +goldstein +##eak +##kt +ld +maitland +penultimate +pollard +##dance +fleets +spaceship +vertebrae +##nig +alcoholism +als +recital +##bham +##ference +##omics +m2 +##bm +trois +##tropical +##в +commemorates +##meric +marge +##raction +1643 +670 +cosmetic +ravaged +##ige +catastrophe +eng +##shida +albrecht +arterial +bellamy +decor +harmon +##rde +bulbs +synchronized +vito +easiest +shetland +shielding +wnba +##glers +##ssar +##riam +brianna +cumbria +##aceous +##rard +cores +thayer +##nsk +brood +hilltop +luminous +carts +keynote +larkin +logos +##cta +##ا +##mund +##quay +lilith +tinted +277 +wrestle +mobilization +##uses +sequential +siam +bloomfield +takahashi +274 +##ieving +presenters +ringo +blazed +witty +##oven +##ignant +devastation +haydn +harmed +newt +therese +##peed +gershwin +molina +rabbis +sudanese +001 +innate +restarted +##sack +##fus +slices +wb +##shah +enroll +hypothetical +hysterical +1743 +fabio +indefinite +warped +##hg +exchanging +525 +unsuitable +##sboro +gallo +1603 +bret +cobalt +homemade +##hunter +mx +operatives +##dhar +terraces +durable +latch +pens +whorls +##ctuated +##eaux +billing +ligament +succumbed +##gly +regulators +spawn +##brick +##stead +filmfare +rochelle +##nzo +1725 +circumstance +saber +supplements +##nsky +##tson +crowe +wellesley +carrot +##9th +##movable +primate +drury +sincerely +topical +##mad +##rao +callahan +kyiv +smarter +tits +undo +##yeh +announcements +anthologies +barrio +nebula +##islaus +##shaft +##tyn +bodyguards +2021 +assassinate +barns +emmett +scully +##mah +##yd +##eland +##tino +##itarian +demoted +gorman +lashed +prized +adventist +writ +##gui +alla +invertebrates +##ausen +1641 +amman +1742 +align +healy +redistribution +##gf +##rize +insulation +##drop +adherents +hezbollah +vitro +ferns +yanking +269 +php +registering +uppsala +cheerleading +confines +mischievous +tully +##ross +49th +docked +roam +stipulated +pumpkin +##bry +prompt +##ezer +blindly +shuddering +craftsmen +frail +scented +katharine +scramble +shaggy +sponge +helix +zaragoza +279 +##52 +43rd +backlash +fontaine +seizures +posse +cowan +nonfiction +telenovela +wwii +hammered +undone +##gpur +encircled +irs +##ivation +artefacts +oneself +searing +smallpox +##belle +##osaurus +shandong +breached +upland +blushing +rankin +infinitely +psyche +tolerated +docking +evicted +##col +unmarked +##lving +gnome +lettering +litres +musique +##oint +benevolent +##jal +blackened +##anna +mccall +racers +tingle +##ocene +##orestation +introductions +radically +292 +##hiff +##باد +1610 +1739 +munchen +plead +##nka +condo +scissors +##sight +##tens +apprehension +##cey +##yin +hallmark +watering +formulas +sequels +##llas +aggravated +bae +commencing +##building +enfield +prohibits +marne +vedic +civilized +euclidean +jagger +beforehand +blasts +dumont +##arney +##nem +740 +conversions +hierarchical +rios +simulator +##dya +##lellan +hedges +oleg +thrusts +shadowed +darby +maximize +1744 +gregorian +##nded +##routed +sham +unspecified +##hog +emory +factual +##smo +##tp +fooled +##rger +ortega +wellness +marlon +##oton +##urance +casket +keating +ley +enclave +##ayan +char +influencing +jia +##chenko +412 +ammonia +erebidae +incompatible +violins +cornered +##arat +grooves +astronauts 
+columbian +rampant +fabrication +kyushu +mahmud +vanish +##dern +mesopotamia +##lete +ict +##rgen +caspian +kenji +pitted +##vered +999 +grimace +roanoke +tchaikovsky +twinned +##analysis +##awan +xinjiang +arias +clemson +kazakh +sizable +1662 +##khand +##vard +plunge +tatum +vittorio +##nden +cholera +##dana +##oper +bracing +indifference +projectile +superliga +##chee +realises +upgrading +299 +porte +retribution +##vies +nk +stil +##resses +ama +bureaucracy +blackberry +bosch +testosterone +collapses +greer +##pathic +ioc +fifties +malls +##erved +bao +baskets +adolescents +siegfried +##osity +##tosis +mantra +detecting +existent +fledgling +##cchi +dissatisfied +gan +telecommunication +mingled +sobbed +6000 +controversies +outdated +taxis +##raus +fright +slams +##lham +##fect +##tten +detectors +fetal +tanned +##uw +fray +goth +olympian +skipping +mandates +scratches +sheng +unspoken +hyundai +tracey +hotspur +restrictive +##buch +americana +mundo +##bari +burroughs +diva +vulcan +##6th +distinctions +thumping +##ngen +mikey +sheds +fide +rescues +springsteen +vested +valuation +##ece +##ely +pinnacle +rake +sylvie +##edo +almond +quivering +##irus +alteration +faltered +##wad +51st +hydra +ticked +##kato +recommends +##dicated +antigua +arjun +stagecoach +wilfred +trickle +pronouns +##pon +aryan +nighttime +##anian +gall +pea +stitch +##hei +leung +milos +##dini +eritrea +nexus +starved +snowfall +kant +parasitic +cot +discus +hana +strikers +appleton +kitchens +##erina +##partisan +##itha +##vius +disclose +metis +##channel +1701 +tesla +##vera +fitch +1735 +blooded +##tila +decimal +##tang +##bai +cyclones +eun +bottled +peas +pensacola +basha +bolivian +crabs +boil +lanterns +partridge +roofed +1645 +necks +##phila +opined +patting +##kla +##lland +chuckles +volta +whereupon +##nche +devout +euroleague +suicidal +##dee +inherently +involuntary +knitting +nasser +##hide +puppets +colourful +courageous +southend +stills +miraculous +hodgson +richer +rochdale +ethernet +greta +uniting +prism +umm +##haya +##itical +##utation +deterioration +pointe +prowess +##ropriation +lids +scranton +billings +subcontinent +##koff +##scope +brute +kellogg +psalms +degraded +##vez +stanisław +##ructured +ferreira +pun +astonishing +gunnar +##yat +arya +prc +gottfried +##tight +excursion +##ographer +dina +##quil +##nare +huffington +illustrious +wilbur +gundam +verandah +##zard +naacp +##odle +constructive +fjord +kade +##naud +generosity +thrilling +baseline +cayman +frankish +plastics +accommodations +zoological +##fting +cedric +qb +motorized +##dome +##otted +squealed +tackled +canucks +budgets +situ +asthma +dail +gabled +grasslands +whimpered +writhing +judgments +##65 +minnie +pv +##carbon +bananas +grille +domes +monique +odin +maguire +markham +tierney +##estra +##chua +libel +poke +speedy +atrium +laval +notwithstanding +##edly +fai +kala +##sur +robb +##sma +listings +luz +supplementary +tianjin +##acing +enzo +jd +ric +scanner +croats +transcribed +##49 +arden +cv +##hair +##raphy +##lver +##uy +357 +seventies +staggering +alam +horticultural +hs +regression +timbers +blasting +##ounded +montagu +manipulating +##cit +catalytic +1550 +troopers +##meo +condemnation +fitzpatrick +##oire +##roved +inexperienced +1670 +castes +##lative +outing +314 +dubois +flicking +quarrel +ste +learners +1625 +iq +whistled +##class +282 +classify +tariffs +temperament +355 +folly +liszt +##yles +immersed +jordanian +ceasefire +apparel +extras +maru +fished +##bio +harta +stockport +assortment +craftsman 
+paralysis +transmitters +##cola +blindness +##wk +fatally +proficiency +solemnly +##orno +repairing +amore +groceries +ultraviolet +##chase +schoolhouse +##tua +resurgence +nailed +##otype +##× +ruse +saliva +diagrams +##tructing +albans +rann +thirties +1b +antennas +hilarious +cougars +paddington +stats +##eger +breakaway +ipod +reza +authorship +prohibiting +scoffed +##etz +##ttle +conscription +defected +trondheim +##fires +ivanov +keenan +##adan +##ciful +##fb +##slow +locating +##ials +##tford +cadiz +basalt +blankly +interned +rags +rattling +##tick +carpathian +reassured +sync +bum +guildford +iss +staunch +##onga +astronomers +sera +sofie +emergencies +susquehanna +##heard +duc +mastery +vh1 +williamsburg +bayer +buckled +craving +##khan +##rdes +bloomington +##write +alton +barbecue +##bians +justine +##hri +##ndt +delightful +smartphone +newtown +photon +retrieval +peugeot +hissing +##monium +##orough +flavors +lighted +relaunched +tainted +##games +##lysis +anarchy +microscopic +hopping +adept +evade +evie +##beau +inhibit +sinn +adjustable +hurst +intuition +wilton +cisco +44th +lawful +lowlands +stockings +thierry +##dalen +##hila +##nai +fates +prank +tb +maison +lobbied +provocative +1724 +4a +utopia +##qual +carbonate +gujarati +purcell +##rford +curtiss +##mei +overgrown +arenas +mediation +swallows +##rnik +respectful +turnbull +##hedron +##hope +alyssa +ozone +##ʻi +ami +gestapo +johansson +snooker +canteen +cuff +declines +empathy +stigma +##ags +##iner +##raine +taxpayers +gui +volga +##wright +##copic +lifespan +overcame +tattooed +enactment +giggles +##ador +##camp +barrington +bribe +obligatory +orbiting +peng +##enas +elusive +sucker +##vating +cong +hardship +empowered +anticipating +estrada +cryptic +greasy +detainees +planck +sudbury +plaid +dod +marriott +kayla +##ears +##vb +##zd +mortally +##hein +cognition +radha +319 +liechtenstein +meade +richly +argyle +harpsichord +liberalism +trumpets +lauded +tyrant +salsa +tiled +lear +promoters +reused +slicing +trident +##chuk +##gami +##lka +cantor +checkpoint +##points +gaul +leger +mammalian +##tov +##aar +##schaft +doha +frenchman +nirvana +##vino +delgado +headlining +##eron +##iography +jug +tko +1649 +naga +intersections +##jia +benfica +nawab +##suka +ashford +gulp +##deck +##vill +##rug +brentford +frazier +pleasures +dunne +potsdam +shenzhen +dentistry +##tec +flanagan +##dorff +##hear +chorale +dinah +prem +quezon +##rogated +relinquished +sutra +terri +##pani +flaps +##rissa +poly +##rnet +homme +aback +##eki +linger +womb +##kson +##lewood +doorstep +orthodoxy +threaded +westfield +##rval +dioceses +fridays +subsided +##gata +loyalists +##biotic +##ettes +letterman +lunatic +prelate +tenderly +invariably +souza +thug +winslow +##otide +furlongs +gogh +jeopardy +##runa +pegasus +##umble +humiliated +standalone +tagged +##roller +freshmen +klan +##bright +attaining +initiating +transatlantic +logged +viz +##uance +1723 +combatants +intervening +stephane +chieftain +despised +grazed +317 +cdc +galveston +godzilla +macro +simulate +##planes +parades +##esses +960 +##ductive +##unes +equator +overdose +##cans +##hosh +##lifting +joshi +epstein +sonora +treacherous +aquatics +manchu +responsive +##sation +supervisory +##christ +##llins +##ibar +##balance +##uso +kimball +karlsruhe +mab +##emy +ignores +phonetic +reuters +spaghetti +820 +almighty +danzig +rumbling +tombstone +designations +lured +outset +##felt +supermarkets +##wt +grupo +kei +kraft +susanna +##blood +comprehension +genealogy +##aghan 
+##verted +redding +##ythe +1722 +bowing +##pore +##roi +lest +sharpened +fulbright +valkyrie +sikhs +##unds +swans +bouquet +merritt +##tage +##venting +commuted +redhead +clerks +leasing +cesare +dea +hazy +##vances +fledged +greenfield +servicemen +##gical +armando +blackout +dt +sagged +downloadable +intra +potion +pods +##4th +##mism +xp +attendants +gambia +stale +##ntine +plump +asteroids +rediscovered +buds +flea +hive +##neas +1737 +classifications +debuts +##eles +olympus +scala +##eurs +##gno +##mute +hummed +sigismund +visuals +wiggled +await +pilasters +clench +sulfate +##ances +bellevue +enigma +trainee +snort +##sw +clouded +denim +##rank +##rder +churning +hartman +lodges +riches +sima +##missible +accountable +socrates +regulates +mueller +##cr +1702 +avoids +solids +himalayas +nutrient +pup +##jevic +squat +fades +nec +##lates +##pina +##rona +##ου +privateer +tequila +##gative +##mpton +apt +hornet +immortals +##dou +asturias +cleansing +dario +##rries +##anta +etymology +servicing +zhejiang +##venor +##nx +horned +erasmus +rayon +relocating +£10 +##bags +escalated +promenade +stubble +2010s +artisans +axial +liquids +mora +sho +yoo +##tsky +bundles +oldies +##nally +notification +bastion +##ths +sparkle +##lved +1728 +leash +pathogen +highs +##hmi +immature +880 +gonzaga +ignatius +mansions +monterrey +sweets +bryson +##loe +polled +regatta +brightest +pei +rosy +squid +hatfield +payroll +addict +meath +cornerback +heaviest +lodging +##mage +capcom +rippled +##sily +barnet +mayhem +ymca +snuggled +rousseau +##cute +blanchard +284 +fragmented +leighton +chromosomes +risking +##md +##strel +##utter +corinne +coyotes +cynical +hiroshi +yeomanry +##ractive +ebook +grading +mandela +plume +agustin +magdalene +##rkin +bea +femme +trafford +##coll +##lun +##tance +52nd +fourier +upton +##mental +camilla +gust +iihf +islamabad +longevity +##kala +feldman +netting +##rization +endeavour +foraging +mfa +orr +##open +greyish +contradiction +graz +##ruff +handicapped +marlene +tweed +oaxaca +spp +campos +miocene +pri +configured +cooks +pluto +cozy +pornographic +##entes +70th +fairness +glided +jonny +lynne +rounding +sired +##emon +##nist +remade +uncover +##mack +complied +lei +newsweek +##jured +##parts +##enting +##pg +293 +finer +guerrillas +athenian +deng +disused +stepmother +accuse +gingerly +seduction +521 +confronting +##walker +##going +gora +nostalgia +sabres +virginity +wrenched +##minated +syndication +wielding +eyre +##56 +##gnon +##igny +behaved +taxpayer +sweeps +##growth +childless +gallant +##ywood +amplified +geraldine +scrape +##ffi +babylonian +fresco +##rdan +##kney +##position +1718 +restricting +tack +fukuoka +osborn +selector +partnering +##dlow +318 +gnu +kia +tak +whitley +gables +##54 +##mania +mri +softness +immersion +##bots +##evsky +1713 +chilling +insignificant +pcs +##uis +elites +lina +purported +supplemental +teaming +##americana +##dding +##inton +proficient +rouen +##nage +##rret +niccolo +selects +##bread +fluffy +1621 +gruff +knotted +mukherjee +polgara +thrash +nicholls +secluded +smoothing +thru +corsica +loaf +whitaker +inquiries +##rrier +##kam +indochina +289 +marlins +myles +peking +##tea +extracts +pastry +superhuman +connacht +vogel +##ditional +##het +##udged +##lash +gloss +quarries +refit +teaser +##alic +##gaon +20s +materialized +sling +camped +pickering +tung +tracker +pursuant +##cide +cranes +soc +##cini +##typical +##viere +anhalt +overboard +workout +chores +fares +orphaned +stains +##logie +fenton +surpassing +joyah 
+triggers +##itte +grandmaster +##lass +##lists +clapping +fraudulent +ledger +nagasaki +##cor +##nosis +##tsa +eucalyptus +tun +##icio +##rney +##tara +dax +heroism +ina +wrexham +onboard +unsigned +##dates +moshe +galley +winnie +droplets +exiles +praises +watered +noodles +##aia +fein +adi +leland +multicultural +stink +bingo +comets +erskine +modernized +canned +constraint +domestically +chemotherapy +featherweight +stifled +##mum +darkly +irresistible +refreshing +hasty +isolate +##oys +kitchener +planners +##wehr +cages +yarn +implant +toulon +elects +childbirth +yue +##lind +##lone +cn +rightful +sportsman +junctions +remodeled +specifies +##rgh +291 +##oons +complimented +##urgent +lister +ot +##logic +bequeathed +cheekbones +fontana +gabby +##dial +amadeus +corrugated +maverick +resented +triangles +##hered +##usly +nazareth +tyrol +1675 +assent +poorer +sectional +aegean +##cous +296 +nylon +ghanaian +##egorical +##weig +cushions +forbid +fusiliers +obstruction +somerville +##scia +dime +earrings +elliptical +leyte +oder +polymers +timmy +atm +midtown +piloted +settles +continual +externally +mayfield +##uh +enrichment +henson +keane +persians +1733 +benji +braden +pep +324 +##efe +contenders +pepsi +valet +##isches +298 +##asse +##earing +goofy +stroll +##amen +authoritarian +occurrences +adversary +ahmedabad +tangent +toppled +dorchester +1672 +modernism +marxism +islamist +charlemagne +exponential +racks +unicode +brunette +mbc +pic +skirmish +##bund +##lad +##powered +##yst +hoisted +messina +shatter +##ctum +jedi +vantage +##music +##neil +clemens +mahmoud +corrupted +authentication +lowry +nils +##washed +omnibus +wounding +jillian +##itors +##opped +serialized +narcotics +handheld +##arm +##plicity +intersecting +stimulating +##onis +crate +fellowships +hemingway +casinos +climatic +fordham +copeland +drip +beatty +leaflets +robber +brothel +madeira +##hedral +sphinx +ultrasound +##vana +valor +forbade +leonid +villas +##aldo +duane +marquez +##cytes +disadvantaged +forearms +kawasaki +reacts +consular +lax +uncles +uphold +##hopper +concepcion +dorsey +lass +##izan +arching +passageway +1708 +researches +tia +internationals +##graphs +##opers +distinguishes +javanese +divert +##uven +plotted +##listic +##rwin +##erik +##tify +affirmative +signifies +validation +##bson +kari +felicity +georgina +zulu +##eros +##rained +##rath +overcoming +##dot +argyll +##rbin +1734 +chiba +ratification +windy +earls +parapet +##marks +hunan +pristine +astrid +punta +##gart +brodie +##kota +##oder +malaga +minerva +rouse +##phonic +bellowed +pagoda +portals +reclamation +##gur +##odies +##⁄₄ +parentheses +quoting +allergic +palette +showcases +benefactor +heartland +nonlinear +##tness +bladed +cheerfully +scans +##ety +##hone +1666 +girlfriends +pedersen +hiram +sous +##liche +##nator +1683 +##nery +##orio +##umen +bobo +primaries +smiley +##cb +unearthed +uniformly +fis +metadata +1635 +ind +##oted +recoil +##titles +##tura +##ια +406 +hilbert +jamestown +mcmillan +tulane +seychelles +##frid +antics +coli +fated +stucco +##grants +1654 +bulky +accolades +arrays +caledonian +carnage +optimism +puebla +##tative +##cave +enforcing +rotherham +seo +dunlop +aeronautics +chimed +incline +zoning +archduke +hellenistic +##oses +##sions +candi +thong +##ople +magnate +rustic +##rsk +projective +slant +##offs +danes +hollis +vocalists +##ammed +congenital +contend +gesellschaft +##ocating +##pressive +douglass +quieter +##cm +##kshi +howled +salim +spontaneously +townsville +buena +southport 
+##bold +kato +1638 +faerie +stiffly +##vus +##rled +297 +flawless +realising +taboo +##7th +bytes +straightening +356 +jena +##hid +##rmin +cartwright +berber +bertram +soloists +411 +noses +417 +coping +fission +hardin +inca +##cen +1717 +mobilized +vhf +##raf +biscuits +curate +##85 +##anial +331 +gaunt +neighbourhoods +1540 +##abas +blanca +bypassed +sockets +behold +coincidentally +##bane +nara +shave +splinter +terrific +##arion +##erian +commonplace +juris +redwood +waistband +boxed +caitlin +fingerprints +jennie +naturalized +##ired +balfour +craters +jody +bungalow +hugely +quilt +glitter +pigeons +undertaker +bulging +constrained +goo +##sil +##akh +assimilation +reworked +##person +persuasion +##pants +felicia +##cliff +##ulent +1732 +explodes +##dun +##inium +##zic +lyman +vulture +hog +overlook +begs +northwards +ow +spoil +##urer +fatima +favorably +accumulate +sargent +sorority +corresponded +dispersal +kochi +toned +##imi +##lita +internacional +newfound +##agger +##lynn +##rigue +booths +peanuts +##eborg +medicare +muriel +nur +##uram +crates +millennia +pajamas +worsened +##breakers +jimi +vanuatu +yawned +##udeau +carousel +##hony +hurdle +##ccus +##mounted +##pod +rv +##eche +airship +ambiguity +compulsion +recapture +##claiming +arthritis +##osomal +1667 +asserting +ngc +sniffing +dade +discontent +glendale +ported +##amina +defamation +rammed +##scent +fling +livingstone +##fleet +875 +##ppy +apocalyptic +comrade +lcd +##lowe +cessna +eine +persecuted +subsistence +demi +hoop +reliefs +710 +coptic +progressing +stemmed +perpetrators +1665 +priestess +##nio +dobson +ebony +rooster +itf +tortricidae +##bbon +##jian +cleanup +##jean +##øy +1721 +eighties +taxonomic +holiness +##hearted +##spar +antilles +showcasing +stabilized +##nb +gia +mascara +michelangelo +dawned +##uria +##vinsky +extinguished +fitz +grotesque +£100 +##fera +##loid +##mous +barges +neue +throbbed +cipher +johnnie +##a1 +##mpt +outburst +##swick +spearheaded +administrations +c1 +heartbreak +pixels +pleasantly +##enay +lombardy +plush +##nsed +bobbie +##hly +reapers +tremor +xiang +minogue +substantive +hitch +barak +##wyl +kwan +##encia +910 +obscene +elegance +indus +surfer +bribery +conserve +##hyllum +##masters +horatio +##fat +apes +rebound +psychotic +##pour +iteration +##mium +##vani +botanic +horribly +antiques +dispose +paxton +##hli +##wg +timeless +1704 +disregard +engraver +hounds +##bau +##version +looted +uno +facilitates +groans +masjid +rutland +antibody +disqualification +decatur +footballers +quake +slacks +48th +rein +scribe +stabilize +commits +exemplary +tho +##hort +##chison +pantry +traversed +##hiti +disrepair +identifiable +vibrated +baccalaureate +##nnis +csa +interviewing +##iensis +##raße +greaves +wealthiest +343 +classed +jogged +£5 +##58 +##atal +illuminating +knicks +respecting +##uno +scrubbed +##iji +##dles +kruger +moods +growls +raider +silvia +chefs +kam +vr +cree +percival +##terol +gunter +counterattack +defiant +henan +ze +##rasia +##riety +equivalence +submissions +##fra +##thor +bautista +mechanically +##heater +cornice +herbal +templar +##mering +outputs +ruining +ligand +renumbered +extravagant +mika +blockbuster +eta +insurrection +##ilia +darkening +ferocious +pianos +strife +kinship +##aer +melee +##anor +##iste +##may +##oue +decidedly +weep +##jad +##missive +##ppel +354 +puget +unease +##gnant +1629 +hammering +kassel +ob +wessex +##lga +bromwich +egan +paranoia +utilization +##atable +##idad +contradictory +provoke +##ols +##ouring +##tangled 
+knesset +##very +##lette +plumbing +##sden +##¹ +greensboro +occult +sniff +338 +zev +beaming +gamer +haggard +mahal +##olt +##pins +mendes +utmost +briefing +gunnery +##gut +##pher +##zh +##rok +1679 +khalifa +sonya +##boot +principals +urbana +wiring +##liffe +##minating +##rrado +dahl +nyu +skepticism +np +townspeople +ithaca +lobster +somethin +##fur +##arina +##−1 +freighter +zimmerman +biceps +contractual +##herton +amend +hurrying +subconscious +##anal +336 +meng +clermont +spawning +##eia +##lub +dignitaries +impetus +snacks +spotting +twigs +##bilis +##cz +##ouk +libertadores +nic +skylar +##aina +##firm +gustave +asean +##anum +dieter +legislatures +flirt +bromley +trolls +umar +##bbies +##tyle +blah +parc +bridgeport +crank +negligence +##nction +46th +constantin +molded +bandages +seriousness +00pm +siegel +carpets +compartments +upbeat +statehood +##dner +##edging +marko +730 +platt +##hane +paving +##iy +1738 +abbess +impatience +limousine +nbl +##talk +441 +lucille +mojo +nightfall +robbers +##nais +karel +brisk +calves +replicate +ascribed +telescopes +##olf +intimidated +##reen +ballast +specialization +##sit +aerodynamic +caliphate +rainer +visionary +##arded +epsilon +##aday +##onte +aggregation +auditory +boosted +reunification +kathmandu +loco +robyn +402 +acknowledges +appointing +humanoid +newell +redeveloped +restraints +##tained +barbarians +chopper +1609 +italiana +##lez +##lho +investigates +wrestlemania +##anies +##bib +690 +##falls +creaked +dragoons +gravely +minions +stupidity +volley +##harat +##week +musik +##eries +##uously +fungal +massimo +semantics +malvern +##ahl +##pee +discourage +embryo +imperialism +1910s +profoundly +##ddled +jiangsu +sparkled +stat +##holz +sweatshirt +tobin +##iction +sneered +##cheon +##oit +brit +causal +smyth +##neuve +diffuse +perrin +silvio +##ipes +##recht +detonated +iqbal +selma +##nism +##zumi +roasted +##riders +tay +##ados +##mament +##mut +##rud +840 +completes +nipples +cfa +flavour +hirsch +##laus +calderon +sneakers +moravian +##ksha +1622 +rq +294 +##imeters +bodo +##isance +##pre +##ronia +anatomical +excerpt +##lke +dh +kunst +##tablished +##scoe +biomass +panted +unharmed +gael +housemates +montpellier +##59 +coa +rodents +tonic +hickory +singleton +##taro +451 +1719 +aldo +breaststroke +dempsey +och +rocco +##cuit +merton +dissemination +midsummer +serials +##idi +haji +polynomials +##rdon +gs +enoch +prematurely +shutter +taunton +£3 +##grating +##inates +archangel +harassed +##asco +326 +archway +dazzling +##ecin +1736 +sumo +wat +##kovich +1086 +honneur +##ently +##nostic +##ttal +##idon +1605 +403 +1716 +blogger +rents +##gnan +hires +##ikh +##dant +howie +##rons +handler +retracted +shocks +1632 +arun +duluth +kepler +trumpeter +##lary +peeking +seasoned +trooper +##mara +laszlo +##iciencies +##rti +heterosexual +##inatory +##ssion +indira +jogging +##inga +##lism +beit +dissatisfaction +malice +##ately +nedra +peeling +##rgeon +47th +stadiums +475 +vertigo +##ains +iced +restroom +##plify +##tub +illustrating +pear +##chner +##sibility +inorganic +rappers +receipts +watery +##kura +lucinda +##oulos +reintroduced +##8th +##tched +gracefully +saxons +nutritional +wastewater +rained +favourites +bedrock +fisted +hallways +likeness +upscale +##lateral +1580 +blinds +prequel +##pps +##tama +deter +humiliating +restraining +tn +vents +1659 +laundering +recess +rosary +tractors +coulter +federer +##ifiers +##plin +persistence +##quitable +geschichte +pendulum +quakers +##beam +bassett +pictorial +buffet +koln 
+##sitor +drills +reciprocal +shooters +##57 +##cton +##tees +converge +pip +dmitri +donnelly +yamamoto +aqua +azores +demographics +hypnotic +spitfire +suspend +wryly +roderick +##rran +sebastien +##asurable +mavericks +##fles +##200 +himalayan +prodigy +##iance +transvaal +demonstrators +handcuffs +dodged +mcnamara +sublime +1726 +crazed +##efined +##till +ivo +pondered +reconciled +shrill +sava +##duk +bal +cad +heresy +jaipur +goran +##nished +341 +lux +shelly +whitehall +##hre +israelis +peacekeeping +##wled +1703 +demetrius +ousted +##arians +##zos +beale +anwar +backstroke +raged +shrinking +cremated +##yck +benign +towing +wadi +darmstadt +landfill +parana +soothe +colleen +sidewalks +mayfair +tumble +hepatitis +ferrer +superstructure +##gingly +##urse +##wee +anthropological +translators +##mies +closeness +hooves +##pw +mondays +##roll +##vita +landscaping +##urized +purification +sock +thorns +thwarted +jalan +tiberius +##taka +saline +##rito +confidently +khyber +sculptors +##ij +brahms +hammersmith +inspectors +battista +fivb +fragmentation +hackney +##uls +arresting +exercising +antoinette +bedfordshire +##zily +dyed +##hema +1656 +racetrack +variability +##tique +1655 +austrians +deteriorating +madman +theorists +aix +lehman +weathered +1731 +decreed +eruptions +1729 +flaw +quinlan +sorbonne +flutes +nunez +1711 +adored +downwards +fable +rasped +1712 +moritz +mouthful +renegade +shivers +stunts +dysfunction +restrain +translit +327 +pancakes +##avio +##cision +##tray +351 +vial +##lden +bain +##maid +##oxide +chihuahua +malacca +vimes +##rba +##rnier +1664 +donnie +plaques +##ually +337 +bangs +floppy +huntsville +loretta +nikolay +##otte +eater +handgun +ubiquitous +##hett +eras +zodiac +1634 +##omorphic +1820s +##zog +cochran +##bula +##lithic +warring +##rada +dalai +excused +blazers +mcconnell +reeling +bot +este +##abi +geese +hoax +taxon +##bla +guitarists +##icon +condemning +hunts +inversion +moffat +taekwondo +##lvis +1624 +stammered +##rest +##rzy +sousa +fundraiser +marylebone +navigable +uptown +cabbage +daniela +salman +shitty +whimper +##kian +##utive +programmers +protections +rm +##rmi +##rued +forceful +##enes +fuss +##tao +##wash +brat +oppressive +reykjavik +spartak +ticking +##inkles +##kiewicz +adolph +horst +maui +protege +straighten +cpc +landau +concourse +clements +resultant +##ando +imaginative +joo +reactivated +##rem +##ffled +##uising +consultative +##guide +flop +kaitlyn +mergers +parenting +somber +##vron +supervise +vidhan +##imum +courtship +exemplified +harmonies +medallist +refining +##rrow +##ка +amara +##hum +780 +goalscorer +sited +overshadowed +rohan +displeasure +secretive +multiplied +osman +##orth +engravings +padre +##kali +##veda +miniatures +mis +##yala +clap +pali +rook +##cana +1692 +57th +antennae +astro +oskar +1628 +bulldog +crotch +hackett +yucatan +##sure +amplifiers +brno +ferrara +migrating +##gree +thanking +turing +##eza +mccann +ting +andersson +onslaught +gaines +ganga +incense +standardization +##mation +sentai +scuba +stuffing +turquoise +waivers +alloys +##vitt +regaining +vaults +##clops +##gizing +digger +furry +memorabilia +probing +##iad +payton +rec +deutschland +filippo +opaque +seamen +zenith +afrikaans +##filtration +disciplined +inspirational +##merie +banco +confuse +grafton +tod +##dgets +championed +simi +anomaly +biplane +##ceptive +electrode +##para +1697 +cleavage +crossbow +swirl +informant +##lars +##osta +afi +bonfire +spec +##oux +lakeside +slump +##culus +##lais +##qvist +##rrigan +1016 
+facades +borg +inwardly +cervical +xl +pointedly +050 +stabilization +##odon +chests +1699 +hacked +ctv +orthogonal +suzy +##lastic +gaulle +jacobite +rearview +##cam +##erted +ashby +##drik +##igate +##mise +##zbek +affectionately +canine +disperse +latham +##istles +##ivar +spielberg +##orin +##idium +ezekiel +cid +##sg +durga +middletown +##cina +customized +frontiers +harden +##etano +##zzy +1604 +bolsheviks +##66 +coloration +yoko +##bedo +briefs +slabs +debra +liquidation +plumage +##oin +blossoms +dementia +subsidy +1611 +proctor +relational +jerseys +parochial +ter +##ici +esa +peshawar +cavalier +loren +cpi +idiots +shamrock +1646 +dutton +malabar +mustache +##endez +##ocytes +referencing +terminates +marche +yarmouth +##sop +acton +mated +seton +subtly +baptised +beige +extremes +jolted +kristina +telecast +##actic +safeguard +waldo +##baldi +##bular +endeavors +sloppy +subterranean +##ensburg +##itung +delicately +pigment +tq +##scu +1626 +##ound +collisions +coveted +herds +##personal +##meister +##nberger +chopra +##ricting +abnormalities +defective +galician +lucie +##dilly +alligator +likened +##genase +burundi +clears +complexion +derelict +deafening +diablo +fingered +champaign +dogg +enlist +isotope +labeling +mrna +##erre +brilliance +marvelous +##ayo +1652 +crawley +ether +footed +dwellers +deserts +hamish +rubs +warlock +skimmed +##lizer +870 +buick +embark +heraldic +irregularities +##ajan +kiara +##kulam +##ieg +antigen +kowalski +##lge +oakley +visitation +##mbit +vt +##suit +1570 +murderers +##miento +##rites +chimneys +##sling +condemn +custer +exchequer +havre +##ghi +fluctuations +##rations +dfb +hendricks +vaccines +##tarian +nietzsche +biking +juicy +##duced +brooding +scrolling +selangor +##ragan +352 +annum +boomed +seminole +sugarcane +##dna +departmental +dismissing +innsbruck +arteries +ashok +batavia +daze +kun +overtook +##rga +##tlan +beheaded +gaddafi +holm +electronically +faulty +galilee +fractures +kobayashi +##lized +gunmen +magma +aramaic +mala +eastenders +inference +messengers +bf +##qu +407 +bathrooms +##vere +1658 +flashbacks +ideally +misunderstood +##jali +##weather +mendez +##grounds +505 +uncanny +##iii +1709 +friendships +##nbc +sacrament +accommodated +reiterated +logistical +pebbles +thumped +##escence +administering +decrees +drafts +##flight +##cased +##tula +futuristic +picket +intimidation +winthrop +##fahan +interfered +339 +afar +francoise +morally +uta +cochin +croft +dwarfs +##bruck +##dents +##nami +biker +##hner +##meral +nano +##isen +##ometric +##pres +##ан +brightened +meek +parcels +securely +gunners +##jhl +##zko +agile +hysteria +##lten +##rcus +bukit +champs +chevy +cuckoo +leith +sadler +theologians +welded +##section +1663 +jj +plurality +xander +##rooms +##formed +shredded +temps +intimately +pau +tormented +##lok +##stellar +1618 +charred +ems +essen +##mmel +alarms +spraying +ascot +blooms +twinkle +##abia +##apes +internment +obsidian +##chaft +snoop +##dav +##ooping +malibu +##tension +quiver +##itia +hays +mcintosh +travers +walsall +##ffie +1623 +beverley +schwarz +plunging +structurally +m3 +rosenthal +vikram +##tsk +770 +ghz +##onda +##tiv +chalmers +groningen +pew +reckon +unicef +##rvis +55th +##gni +1651 +sulawesi +avila +cai +metaphysical +screwing +turbulence +##mberg +augusto +samba +56th +baffled +momentary +toxin +##urian +##wani +aachen +condoms +dali +steppe +##3d +##app +##oed +##year +adolescence +dauphin +electrically +inaccessible +microscopy +nikita +##ega +atv +##cel +##enter +##oles 
+##oteric +##ы +accountants +punishments +wrongly +bribes +adventurous +clinch +flinders +southland +##hem +##kata +gough +##ciency +lads +soared +##ה +undergoes +deformation +outlawed +rubbish +##arus +##mussen +##nidae +##rzburg +arcs +##ingdon +##tituted +1695 +wheelbase +wheeling +bombardier +campground +zebra +##lices +##oj +##bain +lullaby +##ecure +donetsk +wylie +grenada +##arding +##ης +squinting +eireann +opposes +##andra +maximal +runes +##broken +##cuting +##iface +##ror +##rosis +additive +britney +adultery +triggering +##drome +detrimental +aarhus +containment +jc +swapped +vichy +##ioms +madly +##oric +##rag +brant +##ckey +##trix +1560 +1612 +broughton +rustling +##stems +##uder +asbestos +mentoring +##nivorous +finley +leaps +##isan +apical +pry +slits +substitutes +##dict +intuitive +fantasia +insistent +unreasonable +##igen +##vna +domed +hannover +margot +ponder +##zziness +impromptu +jian +lc +rampage +stemming +##eft +andrey +gerais +whichever +amnesia +appropriated +anzac +clicks +modifying +ultimatum +cambrian +maids +verve +yellowstone +##mbs +conservatoire +##scribe +adherence +dinners +spectra +imperfect +mysteriously +sidekick +tatar +tuba +##aks +##ifolia +distrust +##athan +##zle +c2 +ronin +zac +##pse +celaena +instrumentalist +scents +skopje +##mbling +comical +compensated +vidal +condor +intersect +jingle +wavelengths +##urrent +mcqueen +##izzly +carp +weasel +422 +kanye +militias +postdoctoral +eugen +gunslinger +##ɛ +faux +hospice +##for +appalled +derivation +dwarves +##elis +dilapidated +##folk +astoria +philology +##lwyn +##otho +##saka +inducing +philanthropy +##bf +##itative +geek +markedly +sql +##yce +bessie +indices +rn +##flict +495 +frowns +resolving +weightlifting +tugs +cleric +contentious +1653 +mania +rms +##miya +##reate +##ruck +##tucket +bien +eels +marek +##ayton +##cence +discreet +unofficially +##ife +leaks +##bber +1705 +332 +dung +compressor +hillsborough +pandit +shillings +distal +##skin +381 +##tat +##you +nosed +##nir +mangrove +undeveloped +##idia +textures +##inho +##500 +##rise +ae +irritating +nay +amazingly +bancroft +apologetic +compassionate +kata +symphonies +##lovic +airspace +##lch +930 +gifford +precautions +fulfillment +sevilla +vulgar +martinique +##urities +looting +piccolo +tidy +##dermott +quadrant +armchair +incomes +mathematicians +stampede +nilsson +##inking +##scan +foo +quarterfinal +##ostal +shang +shouldered +squirrels +##owe +344 +vinegar +##bner +##rchy +##systems +delaying +##trics +ars +dwyer +rhapsody +sponsoring +##gration +bipolar +cinder +starters +##olio +##urst +421 +signage +##nty +aground +figurative +mons +acquaintances +duets +erroneously +soyuz +elliptic +recreated +##cultural +##quette +##ssed +##tma +##zcz +moderator +scares +##itaire +##stones +##udence +juniper +sighting +##just +##nsen +britten +calabria +ry +bop +cramer +forsyth +stillness +##л +airmen +gathers +unfit +##umber +##upt +taunting +##rip +seeker +streamlined +##bution +holster +schumann +tread +vox +##gano +##onzo +strive +dil +reforming +covent +newbury +predicting +##orro +decorate +tre +##puted +andover +ie +asahi +dept +dunkirk +gills +##tori +buren +huskies +##stis +##stov +abstracts +bets +loosen +##opa +1682 +yearning +##glio +##sir +berman +effortlessly +enamel +napoli +persist +##peration +##uez +attache +elisa +b1 +invitations +##kic +accelerating +reindeer +boardwalk +clutches +nelly +polka +starbucks +##kei +adamant +huey +lough +unbroken +adventurer +embroidery +inspecting +stanza +##ducted +naia +taluka 
+##pone +##roids +chases +deprivation +florian +##jing +##ppet +earthly +##lib +##ssee +colossal +foreigner +vet +freaks +patrice +rosewood +triassic +upstate +##pkins +dominates +ata +chants +ks +vo +##400 +##bley +##raya +##rmed +555 +agra +infiltrate +##ailing +##ilation +##tzer +##uppe +##werk +binoculars +enthusiast +fujian +squeak +##avs +abolitionist +almeida +boredom +hampstead +marsden +rations +##ands +inflated +334 +bonuses +rosalie +patna +##rco +329 +detachments +penitentiary +54th +flourishing +woolf +##dion +##etched +papyrus +##lster +##nsor +##toy +bobbed +dismounted +endelle +inhuman +motorola +tbs +wince +wreath +##ticus +hideout +inspections +sanjay +disgrace +infused +pudding +stalks +##urbed +arsenic +leases +##hyl +##rrard +collarbone +##waite +##wil +dowry +##bant +##edance +genealogical +nitrate +salamanca +scandals +thyroid +necessitated +##! +##" +### +##$ +##% +##& +##' +##( +##) +##* +##+ +##, +##- +##. +##/ +##: +##; +##< +##= +##> +##? +##@ +##[ +##\ +##] +##^ +##_ +##` +##{ +##| +##} +##~ +##¡ +##¢ +##£ +##¤ +##¥ +##¦ +##§ +##¨ +##© +##ª +##« +##¬ +##® +##± +##´ +##µ +##¶ +##· +##º +##» +##¼ +##¾ +##¿ +##æ +##ð +##÷ +##þ +##đ +##ħ +##ŋ +##œ +##ƒ +##ɐ +##ɑ +##ɒ +##ɔ +##ɕ +##ə +##ɡ +##ɣ +##ɨ +##ɪ +##ɫ +##ɬ +##ɯ +##ɲ +##ɴ +##ɹ +##ɾ +##ʀ +##ʁ +##ʂ +##ʃ +##ʉ +##ʊ +##ʋ +##ʌ +##ʎ +##ʐ +##ʑ +##ʒ +##ʔ +##ʰ +##ʲ +##ʳ +##ʷ +##ʸ +##ʻ +##ʼ +##ʾ +##ʿ +##ˈ +##ˡ +##ˢ +##ˣ +##ˤ +##β +##γ +##δ +##ε +##ζ +##θ +##κ +##λ +##μ +##ξ +##ο +##π +##ρ +##σ +##τ +##υ +##φ +##χ +##ψ +##ω +##б +##г +##д +##ж +##з +##м +##п +##с +##у +##ф +##х +##ц +##ч +##ш +##щ +##ъ +##э +##ю +##ђ +##є +##і +##ј +##љ +##њ +##ћ +##ӏ +##ա +##բ +##գ +##դ +##ե +##թ +##ի +##լ +##կ +##հ +##մ +##յ +##ն +##ո +##պ +##ս +##վ +##տ +##ր +##ւ +##ք +##־ +##א +##ב +##ג +##ד +##ו +##ז +##ח +##ט +##י +##ך +##כ +##ל +##ם +##מ +##ן +##נ +##ס +##ע +##ף +##פ +##ץ +##צ +##ק +##ר +##ש +##ת +##، +##ء +##ب +##ت +##ث +##ج +##ح +##خ +##ذ +##ز +##س +##ش +##ص +##ض +##ط +##ظ +##ع +##غ +##ـ +##ف +##ق +##ك +##و +##ى +##ٹ +##پ +##چ +##ک +##گ +##ں +##ھ +##ہ +##ے +##अ +##आ +##उ +##ए +##क +##ख +##ग +##च +##ज +##ट +##ड +##ण +##त +##थ +##द +##ध +##न +##प +##ब +##भ +##म +##य +##र +##ल +##व +##श +##ष +##स +##ह +##ा +##ि +##ी +##ो +##। +##॥ +##ং +##অ +##আ +##ই +##উ +##এ +##ও +##ক +##খ +##গ +##চ +##ছ +##জ +##ট +##ড +##ণ +##ত +##থ +##দ +##ধ +##ন +##প +##ব +##ভ +##ম +##য +##র +##ল +##শ +##ষ +##স +##হ +##া +##ি +##ী +##ে +##க +##ச +##ட +##த +##ந +##ன +##ப +##ம +##ய +##ர +##ல +##ள +##வ +##ா +##ி +##ு +##ே +##ை +##ನ +##ರ +##ಾ +##ක +##ය +##ර +##ල +##ව +##ා +##ก +##ง +##ต +##ท +##น +##พ +##ม +##ย +##ร +##ล +##ว +##ส +##อ +##า +##เ +##་ +##། +##ག +##ང +##ད +##ན +##པ +##བ +##མ +##འ +##ར +##ལ +##ས +##မ +##ა +##ბ +##გ +##დ +##ე +##ვ +##თ +##ი +##კ +##ლ +##მ +##ნ +##ო +##რ +##ს +##ტ +##უ +##ᄀ +##ᄂ +##ᄃ +##ᄅ +##ᄆ +##ᄇ +##ᄉ +##ᄊ +##ᄋ +##ᄌ +##ᄎ +##ᄏ +##ᄐ +##ᄑ +##ᄒ +##ᅡ +##ᅢ +##ᅥ +##ᅦ +##ᅧ +##ᅩ +##ᅪ +##ᅭ +##ᅮ +##ᅯ +##ᅲ +##ᅳ +##ᅴ +##ᅵ +##ᆨ +##ᆫ +##ᆯ +##ᆷ +##ᆸ +##ᆼ +##ᴬ +##ᴮ +##ᴰ +##ᴵ +##ᴺ +##ᵀ +##ᵃ +##ᵇ +##ᵈ +##ᵉ +##ᵍ +##ᵏ +##ᵐ +##ᵒ +##ᵖ +##ᵗ +##ᵘ +##ᵣ +##ᵤ +##ᵥ +##ᶜ +##ᶠ +##‐ +##‑ +##‒ +##– +##— +##― +##‖ +##‘ +##’ +##‚ +##“ +##” +##„ +##† +##‡ +##• +##… +##‰ +##′ +##″ +##› +##‿ +##⁄ +##⁰ +##ⁱ +##⁴ +##⁵ +##⁶ +##⁷ +##⁸ +##⁹ +##⁻ +##ⁿ +##₅ +##₆ +##₇ +##₈ +##₉ +##₊ +##₍ +##₎ +##ₐ +##ₑ +##ₒ +##ₓ +##ₕ +##ₖ +##ₗ +##ₘ +##ₚ +##ₛ +##ₜ +##₤ +##₩ +##€ +##₱ +##₹ +##ℓ +##№ +##ℝ +##™ +##⅓ +##⅔ +##← +##↑ +##→ +##↓ +##↔ +##↦ +##⇄ +##⇌ +##⇒ +##∂ +##∅ +##∆ +##∇ +##∈ +##∗ +##∘ +##√ +##∞ +##∧ +##∨ +##∩ +##∪ +##≈ +##≡ +##≤ +##≥ +##⊂ +##⊆ +##⊕ +##⊗ +##⋅ +##─ +##│ +##■ +##▪ +##● 
+##★ +##☆ +##☉ +##♠ +##♣ +##♥ +##♦ +##♯ +##⟨ +##⟩ +##ⱼ +##⺩ +##⺼ +##⽥ +##、 +##。 +##〈 +##〉 +##《 +##》 +##「 +##」 +##『 +##』 +##〜 +##あ +##い +##う +##え +##お +##か +##き +##く +##け +##こ +##さ +##し +##す +##せ +##そ +##た +##ち +##っ +##つ +##て +##と +##な +##に +##ぬ +##ね +##の +##は +##ひ +##ふ +##へ +##ほ +##ま +##み +##む +##め +##も +##や +##ゆ +##よ +##ら +##り +##る +##れ +##ろ +##を +##ん +##ァ +##ア +##ィ +##イ +##ウ +##ェ +##エ +##オ +##カ +##キ +##ク +##ケ +##コ +##サ +##シ +##ス +##セ +##タ +##チ +##ッ +##ツ +##テ +##ト +##ナ +##ニ +##ノ +##ハ +##ヒ +##フ +##ヘ +##ホ +##マ +##ミ +##ム +##メ +##モ +##ャ +##ュ +##ョ +##ラ +##リ +##ル +##レ +##ロ +##ワ +##ン +##・ +##ー +##一 +##三 +##上 +##下 +##不 +##世 +##中 +##主 +##久 +##之 +##也 +##事 +##二 +##五 +##井 +##京 +##人 +##亻 +##仁 +##介 +##代 +##仮 +##伊 +##会 +##佐 +##侍 +##保 +##信 +##健 +##元 +##光 +##八 +##公 +##内 +##出 +##分 +##前 +##劉 +##力 +##加 +##勝 +##北 +##区 +##十 +##千 +##南 +##博 +##原 +##口 +##古 +##史 +##司 +##合 +##吉 +##同 +##名 +##和 +##囗 +##四 +##国 +##國 +##土 +##地 +##坂 +##城 +##堂 +##場 +##士 +##夏 +##外 +##大 +##天 +##太 +##夫 +##奈 +##女 +##子 +##学 +##宀 +##宇 +##安 +##宗 +##定 +##宣 +##宮 +##家 +##宿 +##寺 +##將 +##小 +##尚 +##山 +##岡 +##島 +##崎 +##川 +##州 +##巿 +##帝 +##平 +##年 +##幸 +##广 +##弘 +##張 +##彳 +##後 +##御 +##德 +##心 +##忄 +##志 +##忠 +##愛 +##成 +##我 +##戦 +##戸 +##手 +##扌 +##政 +##文 +##新 +##方 +##日 +##明 +##星 +##春 +##昭 +##智 +##曲 +##書 +##月 +##有 +##朝 +##木 +##本 +##李 +##村 +##東 +##松 +##林 +##森 +##楊 +##樹 +##橋 +##歌 +##止 +##正 +##武 +##比 +##氏 +##民 +##水 +##氵 +##氷 +##永 +##江 +##沢 +##河 +##治 +##法 +##海 +##清 +##漢 +##瀬 +##火 +##版 +##犬 +##王 +##生 +##田 +##男 +##疒 +##発 +##白 +##的 +##皇 +##目 +##相 +##省 +##真 +##石 +##示 +##社 +##神 +##福 +##禾 +##秀 +##秋 +##空 +##立 +##章 +##竹 +##糹 +##美 +##義 +##耳 +##良 +##艹 +##花 +##英 +##華 +##葉 +##藤 +##行 +##街 +##西 +##見 +##訁 +##語 +##谷 +##貝 +##貴 +##車 +##軍 +##辶 +##道 +##郎 +##郡 +##部 +##都 +##里 +##野 +##金 +##鈴 +##镇 +##長 +##門 +##間 +##阝 +##阿 +##陳 +##陽 +##雄 +##青 +##面 +##風 +##食 +##香 +##馬 +##高 +##龍 +##龸 +##fi +##fl +##! +##( +##) +##, +##- +##. +##/ +##: +##? +##~ diff --git a/extras/BLIP/models/blip.py b/extras/BLIP/models/blip.py new file mode 100644 index 0000000000000000000000000000000000000000..a2566331dd659c14e708d8cc2fe0649f479d221a --- /dev/null +++ b/extras/BLIP/models/blip.py @@ -0,0 +1,239 @@ +''' + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. 
+ * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + * By Junnan Li +''' +import warnings +warnings.filterwarnings("ignore") + +from extras.BLIP.models.vit import VisionTransformer, interpolate_pos_embed +from extras.BLIP.models.med import BertConfig, BertModel, BertLMHeadModel +from transformers import BertTokenizer + +import torch +from torch import nn +import torch.nn.functional as F + +import os +from urllib.parse import urlparse +from timm.models.hub import download_cached_file + +class BLIP_Base(nn.Module): + def __init__(self, + med_config = 'configs/med_config.json', + image_size = 224, + vit = 'base', + vit_grad_ckpt = False, + vit_ckpt_layer = 0, + ): + """ + Args: + med_config (str): path for the mixture of encoder-decoder model's configuration file + image_size (int): input image size + vit (str): model size of vision transformer + """ + super().__init__() + + self.visual_encoder, vision_width = create_vit(vit,image_size, vit_grad_ckpt, vit_ckpt_layer) + self.tokenizer = init_tokenizer() + med_config = BertConfig.from_json_file(med_config) + med_config.encoder_width = vision_width + self.text_encoder = BertModel(config=med_config, add_pooling_layer=False) + + + def forward(self, image, caption, mode): + + assert mode in ['image', 'text', 'multimodal'], "mode parameter must be image, text, or multimodal" + text = self.tokenizer(caption, return_tensors="pt").to(image.device) + + if mode=='image': + # return image features + image_embeds = self.visual_encoder(image) + return image_embeds + + elif mode=='text': + # return text features + text_output = self.text_encoder(text.input_ids, attention_mask = text.attention_mask, + return_dict = True, mode = 'text') + return text_output.last_hidden_state + + elif mode=='multimodal': + # return multimodel features + image_embeds = self.visual_encoder(image) + image_atts = torch.ones(image_embeds.size()[:-1],dtype=torch.long).to(image.device) + + text.input_ids[:,0] = self.tokenizer.enc_token_id + output = self.text_encoder(text.input_ids, + attention_mask = text.attention_mask, + encoder_hidden_states = image_embeds, + encoder_attention_mask = image_atts, + return_dict = True, + ) + return output.last_hidden_state + + + +class BLIP_Decoder(nn.Module): + def __init__(self, + med_config = 'configs/med_config.json', + image_size = 384, + vit = 'base', + vit_grad_ckpt = False, + vit_ckpt_layer = 0, + prompt = 'a picture of ', + ): + """ + Args: + med_config (str): path for the mixture of encoder-decoder model's configuration file + image_size (int): input image size + vit (str): model size of vision transformer + """ + super().__init__() + + self.visual_encoder, vision_width = create_vit(vit,image_size, vit_grad_ckpt, vit_ckpt_layer) + self.tokenizer = init_tokenizer() + med_config = BertConfig.from_json_file(med_config) + med_config.encoder_width = vision_width + self.text_decoder = BertLMHeadModel(config=med_config) + + self.prompt = prompt + self.prompt_length = len(self.tokenizer(self.prompt).input_ids)-1 + + + def forward(self, image, caption): + + image_embeds = self.visual_encoder(image) + image_atts = torch.ones(image_embeds.size()[:-1],dtype=torch.long).to(image.device) + + text = self.tokenizer(caption, padding='longest', truncation=True, max_length=40, return_tensors="pt").to(image.device) + + text.input_ids[:,0] = self.tokenizer.bos_token_id + + decoder_targets = text.input_ids.masked_fill(text.input_ids == 
self.tokenizer.pad_token_id, -100) + decoder_targets[:,:self.prompt_length] = -100 + + decoder_output = self.text_decoder(text.input_ids, + attention_mask = text.attention_mask, + encoder_hidden_states = image_embeds, + encoder_attention_mask = image_atts, + labels = decoder_targets, + return_dict = True, + ) + loss_lm = decoder_output.loss + + return loss_lm + + def generate(self, image, sample=False, num_beams=3, max_length=30, min_length=10, top_p=0.9, repetition_penalty=1.0): + image_embeds = self.visual_encoder(image) + + if not sample: + image_embeds = image_embeds.repeat_interleave(num_beams,dim=0) + + image_atts = torch.ones(image_embeds.size()[:-1],dtype=torch.long).to(image.device) + model_kwargs = {"encoder_hidden_states": image_embeds, "encoder_attention_mask":image_atts} + + prompt = [self.prompt] * image.size(0) + input_ids = self.tokenizer(prompt, return_tensors="pt").input_ids.to(image.device) + input_ids[:,0] = self.tokenizer.bos_token_id + input_ids = input_ids[:, :-1] + + if sample: + #nucleus sampling + outputs = self.text_decoder.generate(input_ids=input_ids, + max_length=max_length, + min_length=min_length, + do_sample=True, + top_p=top_p, + num_return_sequences=1, + eos_token_id=self.tokenizer.sep_token_id, + pad_token_id=self.tokenizer.pad_token_id, + repetition_penalty=1.1, + **model_kwargs) + else: + #beam search + outputs = self.text_decoder.generate(input_ids=input_ids, + max_length=max_length, + min_length=min_length, + num_beams=num_beams, + eos_token_id=self.tokenizer.sep_token_id, + pad_token_id=self.tokenizer.pad_token_id, + repetition_penalty=repetition_penalty, + **model_kwargs) + + captions = [] + for output in outputs: + caption = self.tokenizer.decode(output, skip_special_tokens=True) + captions.append(caption[len(self.prompt):]) + return captions + + +def blip_decoder(pretrained='',**kwargs): + model = BLIP_Decoder(**kwargs) + if pretrained: + model,msg = load_checkpoint(model,pretrained) + assert(len(msg.missing_keys)==0) + return model + +def blip_feature_extractor(pretrained='',**kwargs): + model = BLIP_Base(**kwargs) + if pretrained: + model,msg = load_checkpoint(model,pretrained) + assert(len(msg.missing_keys)==0) + return model + +def init_tokenizer(): + tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "bert_tokenizer") + tokenizer = BertTokenizer.from_pretrained(tokenizer_path) + tokenizer.add_special_tokens({'bos_token':'[DEC]'}) + tokenizer.add_special_tokens({'additional_special_tokens':['[ENC]']}) + tokenizer.enc_token_id = tokenizer.additional_special_tokens_ids[0] + return tokenizer + + +def create_vit(vit, image_size, use_grad_checkpointing=False, ckpt_layer=0, drop_path_rate=0): + + assert vit in ['base', 'large'], "vit parameter must be base or large" + if vit=='base': + vision_width = 768 + visual_encoder = VisionTransformer(img_size=image_size, patch_size=16, embed_dim=vision_width, depth=12, + num_heads=12, use_grad_checkpointing=use_grad_checkpointing, ckpt_layer=ckpt_layer, + drop_path_rate=0 or drop_path_rate + ) + elif vit=='large': + vision_width = 1024 + visual_encoder = VisionTransformer(img_size=image_size, patch_size=16, embed_dim=vision_width, depth=24, + num_heads=16, use_grad_checkpointing=use_grad_checkpointing, ckpt_layer=ckpt_layer, + drop_path_rate=0.1 or drop_path_rate + ) + return visual_encoder, vision_width + +def is_url(url_or_filename): + parsed = urlparse(url_or_filename) + return parsed.scheme in ("http", "https") + +def load_checkpoint(model,url_or_filename): + if 
is_url(url_or_filename): + cached_file = download_cached_file(url_or_filename, check_hash=False, progress=True) + checkpoint = torch.load(cached_file, map_location='cpu') + elif os.path.isfile(url_or_filename): + checkpoint = torch.load(url_or_filename, map_location='cpu') + else: + raise RuntimeError('checkpoint url or path is invalid') + + state_dict = checkpoint['model'] + + state_dict['visual_encoder.pos_embed'] = interpolate_pos_embed(state_dict['visual_encoder.pos_embed'],model.visual_encoder) + if 'visual_encoder_m.pos_embed' in model.state_dict().keys(): + state_dict['visual_encoder_m.pos_embed'] = interpolate_pos_embed(state_dict['visual_encoder_m.pos_embed'], + model.visual_encoder_m) + for key in model.state_dict().keys(): + if key in state_dict.keys(): + if state_dict[key].shape!=model.state_dict()[key].shape: + del state_dict[key] + + msg = model.load_state_dict(state_dict,strict=False) + print('load checkpoint from %s'%url_or_filename) + return model,msg + diff --git a/extras/BLIP/models/blip_itm.py b/extras/BLIP/models/blip_itm.py new file mode 100644 index 0000000000000000000000000000000000000000..6f4da82180a463ccad6b0a4d61d6eed9b0d24bd6 --- /dev/null +++ b/extras/BLIP/models/blip_itm.py @@ -0,0 +1,76 @@ +from extras.BLIP.models.med import BertConfig, BertModel +from transformers import BertTokenizer + +import torch +from torch import nn +import torch.nn.functional as F + +from extras.BLIP.models.blip import create_vit, init_tokenizer, load_checkpoint + +class BLIP_ITM(nn.Module): + def __init__(self, + med_config = 'configs/med_config.json', + image_size = 384, + vit = 'base', + vit_grad_ckpt = False, + vit_ckpt_layer = 0, + embed_dim = 256, + ): + """ + Args: + med_config (str): path for the mixture of encoder-decoder model's configuration file + image_size (int): input image size + vit (str): model size of vision transformer + """ + super().__init__() + + self.visual_encoder, vision_width = create_vit(vit,image_size, vit_grad_ckpt, vit_ckpt_layer) + self.tokenizer = init_tokenizer() + med_config = BertConfig.from_json_file(med_config) + med_config.encoder_width = vision_width + self.text_encoder = BertModel(config=med_config, add_pooling_layer=False) + + text_width = self.text_encoder.config.hidden_size + + self.vision_proj = nn.Linear(vision_width, embed_dim) + self.text_proj = nn.Linear(text_width, embed_dim) + + self.itm_head = nn.Linear(text_width, 2) + + + def forward(self, image, caption, match_head='itm'): + + image_embeds = self.visual_encoder(image) + image_atts = torch.ones(image_embeds.size()[:-1],dtype=torch.long).to(image.device) + + text = self.tokenizer(caption, padding='max_length', truncation=True, max_length=35, + return_tensors="pt").to(image.device) + + + if match_head=='itm': + output = self.text_encoder(text.input_ids, + attention_mask = text.attention_mask, + encoder_hidden_states = image_embeds, + encoder_attention_mask = image_atts, + return_dict = True, + ) + itm_output = self.itm_head(output.last_hidden_state[:,0,:]) + return itm_output + + elif match_head=='itc': + text_output = self.text_encoder(text.input_ids, attention_mask = text.attention_mask, + return_dict = True, mode = 'text') + image_feat = F.normalize(self.vision_proj(image_embeds[:,0,:]),dim=-1) + text_feat = F.normalize(self.text_proj(text_output.last_hidden_state[:,0,:]),dim=-1) + + sim = image_feat @ text_feat.t() + return sim + + +def blip_itm(pretrained='',**kwargs): + model = BLIP_ITM(**kwargs) + if pretrained: + model,msg = load_checkpoint(model,pretrained) + 
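+ # a minimal sanity check: load_state_dict(strict=False) reports unmatched
+ # parameters in msg, and a pretrained ITM checkpoint is expected to cover
+ # every weight of this model, so any missing key is treated as fatal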
assert(len(msg.missing_keys)==0) + return model + \ No newline at end of file diff --git a/extras/BLIP/models/blip_nlvr.py b/extras/BLIP/models/blip_nlvr.py new file mode 100644 index 0000000000000000000000000000000000000000..0eb9eaa696753d0c9e610507b765c2468dfe7adb --- /dev/null +++ b/extras/BLIP/models/blip_nlvr.py @@ -0,0 +1,105 @@ +from extras.BLIP.models.med import BertConfig +from extras.BLIP.models.nlvr_encoder import BertModel +from extras.BLIP.models.vit import interpolate_pos_embed +from extras.BLIP.models.blip import create_vit, init_tokenizer, is_url + +from timm.models.hub import download_cached_file + +import torch +from torch import nn +import torch.nn.functional as F +from transformers import BertTokenizer +import numpy as np +import os + + +class BLIP_NLVR(nn.Module): + def __init__(self, + med_config = 'configs/med_config.json', + image_size = 480, + vit = 'base', + vit_grad_ckpt = False, + vit_ckpt_layer = 0, + ): + """ + Args: + med_config (str): path for the mixture of encoder-decoder model's configuration file + image_size (int): input image size + vit (str): model size of vision transformer + """ + super().__init__() + + self.visual_encoder, vision_width = create_vit(vit,image_size, vit_grad_ckpt, vit_ckpt_layer, drop_path_rate=0.1) + self.tokenizer = init_tokenizer() + med_config = BertConfig.from_json_file(med_config) + med_config.encoder_width = vision_width + self.text_encoder = BertModel(config=med_config, add_pooling_layer=False) + + self.cls_head = nn.Sequential( + nn.Linear(self.text_encoder.config.hidden_size, self.text_encoder.config.hidden_size), + nn.ReLU(), + nn.Linear(self.text_encoder.config.hidden_size, 2) + ) + + def forward(self, image, text, targets, train=True): + + image_embeds = self.visual_encoder(image) + image_atts = torch.ones(image_embeds.size()[:-1],dtype=torch.long).to(image.device) + image0_embeds, image1_embeds = torch.split(image_embeds,targets.size(0)) + + text = self.tokenizer(text, padding='longest', return_tensors="pt").to(image.device) + text.input_ids[:,0] = self.tokenizer.enc_token_id + + output = self.text_encoder(text.input_ids, + attention_mask = text.attention_mask, + encoder_hidden_states = [image0_embeds,image1_embeds], + encoder_attention_mask = [image_atts[:image0_embeds.size(0)], + image_atts[image0_embeds.size(0):]], + return_dict = True, + ) + hidden_state = output.last_hidden_state[:,0,:] + prediction = self.cls_head(hidden_state) + + if train: + loss = F.cross_entropy(prediction, targets) + return loss + else: + return prediction + +def blip_nlvr(pretrained='',**kwargs): + model = BLIP_NLVR(**kwargs) + if pretrained: + model,msg = load_checkpoint(model,pretrained) + print("missing keys:") + print(msg.missing_keys) + return model + + +def load_checkpoint(model,url_or_filename): + if is_url(url_or_filename): + cached_file = download_cached_file(url_or_filename, check_hash=False, progress=True) + checkpoint = torch.load(cached_file, map_location='cpu') + elif os.path.isfile(url_or_filename): + checkpoint = torch.load(url_or_filename, map_location='cpu') + else: + raise RuntimeError('checkpoint url or path is invalid') + state_dict = checkpoint['model'] + + state_dict['visual_encoder.pos_embed'] = interpolate_pos_embed(state_dict['visual_encoder.pos_embed'],model.visual_encoder) + + for key in list(state_dict.keys()): + if 'crossattention.self.' 
in key: + new_key0 = key.replace('self','self0') + new_key1 = key.replace('self','self1') + state_dict[new_key0] = state_dict[key] + state_dict[new_key1] = state_dict[key] + elif 'crossattention.output.dense.' in key: + new_key0 = key.replace('dense','dense0') + new_key1 = key.replace('dense','dense1') + state_dict[new_key0] = state_dict[key] + state_dict[new_key1] = state_dict[key] + + msg = model.load_state_dict(state_dict,strict=False) + print('load checkpoint from %s'%url_or_filename) + return model,msg + \ No newline at end of file diff --git a/extras/BLIP/models/blip_pretrain.py b/extras/BLIP/models/blip_pretrain.py new file mode 100644 index 0000000000000000000000000000000000000000..9b8a3a47539c35b2e15def23acb392963f5b33ac --- /dev/null +++ b/extras/BLIP/models/blip_pretrain.py @@ -0,0 +1,339 @@ +''' + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. + * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + * By Junnan Li +''' +from extras.BLIP.models.med import BertConfig, BertModel, BertLMHeadModel +from transformers import BertTokenizer +import transformers +transformers.logging.set_verbosity_error() + +import torch +from torch import nn +import torch.nn.functional as F + +from extras.BLIP.models.blip import create_vit, init_tokenizer, load_checkpoint + +class BLIP_Pretrain(nn.Module): + def __init__(self, + med_config = 'configs/bert_config.json', + image_size = 224, + vit = 'base', + vit_grad_ckpt = False, + vit_ckpt_layer = 0, + embed_dim = 256, + queue_size = 57600, + momentum = 0.995, + ): + """ + Args: + med_config (str): path for the mixture of encoder-decoder model's configuration file + image_size (int): input image size + vit (str): model size of vision transformer + """ + super().__init__() + + self.visual_encoder, vision_width = create_vit(vit,image_size, vit_grad_ckpt, vit_ckpt_layer, 0) + + if vit=='base': + checkpoint = torch.hub.load_state_dict_from_url( + url="https://dl.fbaipublicfiles.com/deit/deit_base_patch16_224-b5f2ef4d.pth", + map_location="cpu", check_hash=True) + state_dict = checkpoint["model"] + msg = self.visual_encoder.load_state_dict(state_dict,strict=False) + elif vit=='large': + from timm.models.helpers import load_custom_pretrained + from timm.models.vision_transformer import default_cfgs + load_custom_pretrained(self.visual_encoder,default_cfgs['vit_large_patch16_224_in21k']) + + self.tokenizer = init_tokenizer() + encoder_config = BertConfig.from_json_file(med_config) + encoder_config.encoder_width = vision_width + self.text_encoder = BertModel.from_pretrained('bert-base-uncased',config=encoder_config, add_pooling_layer=False) + self.text_encoder.resize_token_embeddings(len(self.tokenizer)) + + text_width = self.text_encoder.config.hidden_size + + self.vision_proj = nn.Linear(vision_width, embed_dim) + self.text_proj = nn.Linear(text_width, embed_dim) + + self.itm_head = nn.Linear(text_width, 2) + + # create momentum encoders + self.visual_encoder_m, vision_width = create_vit(vit,image_size) + self.vision_proj_m = nn.Linear(vision_width, embed_dim) + self.text_encoder_m = BertModel(config=encoder_config, add_pooling_layer=False) + self.text_proj_m = nn.Linear(text_width, embed_dim) + + self.model_pairs = [[self.visual_encoder,self.visual_encoder_m], + [self.vision_proj,self.vision_proj_m], + [self.text_encoder,self.text_encoder_m], + [self.text_proj,self.text_proj_m], + ] + self.copy_params() + + # create the queue + 
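+ # fixed-size FIFO feature queues (one per modality, shape embed_dim x queue_size)
+ # supply a large pool of negatives for the contrastive loss, in the style of
+ # MoCo/ALBEF; queue_ptr marks the next slot _dequeue_and_enqueue will overwrite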
self.register_buffer("image_queue", torch.randn(embed_dim, queue_size)) + self.register_buffer("text_queue", torch.randn(embed_dim, queue_size)) + self.register_buffer("queue_ptr", torch.zeros(1, dtype=torch.long)) + + self.image_queue = nn.functional.normalize(self.image_queue, dim=0) + self.text_queue = nn.functional.normalize(self.text_queue, dim=0) + + self.queue_size = queue_size + self.momentum = momentum + self.temp = nn.Parameter(0.07*torch.ones([])) + + # create the decoder + decoder_config = BertConfig.from_json_file(med_config) + decoder_config.encoder_width = vision_width + self.text_decoder = BertLMHeadModel.from_pretrained('bert-base-uncased',config=decoder_config) + self.text_decoder.resize_token_embeddings(len(self.tokenizer)) + tie_encoder_decoder_weights(self.text_encoder,self.text_decoder.bert,'','/attention') + + + def forward(self, image, caption, alpha): + with torch.no_grad(): + self.temp.clamp_(0.001,0.5) + + image_embeds = self.visual_encoder(image) + image_atts = torch.ones(image_embeds.size()[:-1],dtype=torch.long).to(image.device) + image_feat = F.normalize(self.vision_proj(image_embeds[:,0,:]),dim=-1) + + text = self.tokenizer(caption, padding='max_length', truncation=True, max_length=30, + return_tensors="pt").to(image.device) + text_output = self.text_encoder(text.input_ids, attention_mask = text.attention_mask, + return_dict = True, mode = 'text') + text_feat = F.normalize(self.text_proj(text_output.last_hidden_state[:,0,:]),dim=-1) + + # get momentum features + with torch.no_grad(): + self._momentum_update() + image_embeds_m = self.visual_encoder_m(image) + image_feat_m = F.normalize(self.vision_proj_m(image_embeds_m[:,0,:]),dim=-1) + image_feat_all = torch.cat([image_feat_m.t(),self.image_queue.clone().detach()],dim=1) + + text_output_m = self.text_encoder_m(text.input_ids, attention_mask = text.attention_mask, + return_dict = True, mode = 'text') + text_feat_m = F.normalize(self.text_proj_m(text_output_m.last_hidden_state[:,0,:]),dim=-1) + text_feat_all = torch.cat([text_feat_m.t(),self.text_queue.clone().detach()],dim=1) + + sim_i2t_m = image_feat_m @ text_feat_all / self.temp + sim_t2i_m = text_feat_m @ image_feat_all / self.temp + + sim_targets = torch.zeros(sim_i2t_m.size()).to(image.device) + sim_targets.fill_diagonal_(1) + + sim_i2t_targets = alpha * F.softmax(sim_i2t_m, dim=1) + (1 - alpha) * sim_targets + sim_t2i_targets = alpha * F.softmax(sim_t2i_m, dim=1) + (1 - alpha) * sim_targets + + sim_i2t = image_feat @ text_feat_all / self.temp + sim_t2i = text_feat @ image_feat_all / self.temp + + loss_i2t = -torch.sum(F.log_softmax(sim_i2t, dim=1)*sim_i2t_targets,dim=1).mean() + loss_t2i = -torch.sum(F.log_softmax(sim_t2i, dim=1)*sim_t2i_targets,dim=1).mean() + + loss_ita = (loss_i2t+loss_t2i)/2 + + self._dequeue_and_enqueue(image_feat_m, text_feat_m) + + ###============== Image-text Matching ===================### + encoder_input_ids = text.input_ids.clone() + encoder_input_ids[:,0] = self.tokenizer.enc_token_id + + # forward the positve image-text pair + bs = image.size(0) + output_pos = self.text_encoder(encoder_input_ids, + attention_mask = text.attention_mask, + encoder_hidden_states = image_embeds, + encoder_attention_mask = image_atts, + return_dict = True, + ) + with torch.no_grad(): + weights_t2i = F.softmax(sim_t2i[:,:bs],dim=1)+1e-4 + weights_t2i.fill_diagonal_(0) + weights_i2t = F.softmax(sim_i2t[:,:bs],dim=1)+1e-4 + weights_i2t.fill_diagonal_(0) + + # select a negative image for each text + image_embeds_neg = [] + for b in range(bs): + 
neg_idx = torch.multinomial(weights_t2i[b], 1).item() + image_embeds_neg.append(image_embeds[neg_idx]) + image_embeds_neg = torch.stack(image_embeds_neg,dim=0) + + # select a negative text for each image + text_ids_neg = [] + text_atts_neg = [] + for b in range(bs): + neg_idx = torch.multinomial(weights_i2t[b], 1).item() + text_ids_neg.append(encoder_input_ids[neg_idx]) + text_atts_neg.append(text.attention_mask[neg_idx]) + + text_ids_neg = torch.stack(text_ids_neg,dim=0) + text_atts_neg = torch.stack(text_atts_neg,dim=0) + + text_ids_all = torch.cat([encoder_input_ids, text_ids_neg],dim=0) + text_atts_all = torch.cat([text.attention_mask, text_atts_neg],dim=0) + + image_embeds_all = torch.cat([image_embeds_neg,image_embeds],dim=0) + image_atts_all = torch.cat([image_atts,image_atts],dim=0) + + output_neg = self.text_encoder(text_ids_all, + attention_mask = text_atts_all, + encoder_hidden_states = image_embeds_all, + encoder_attention_mask = image_atts_all, + return_dict = True, + ) + + vl_embeddings = torch.cat([output_pos.last_hidden_state[:,0,:], output_neg.last_hidden_state[:,0,:]],dim=0) + vl_output = self.itm_head(vl_embeddings) + + itm_labels = torch.cat([torch.ones(bs,dtype=torch.long),torch.zeros(2*bs,dtype=torch.long)], + dim=0).to(image.device) + loss_itm = F.cross_entropy(vl_output, itm_labels) + + ##================= LM ========================## + decoder_input_ids = text.input_ids.clone() + decoder_input_ids[:,0] = self.tokenizer.bos_token_id + decoder_targets = decoder_input_ids.masked_fill(decoder_input_ids == self.tokenizer.pad_token_id, -100) + + decoder_output = self.text_decoder(decoder_input_ids, + attention_mask = text.attention_mask, + encoder_hidden_states = image_embeds, + encoder_attention_mask = image_atts, + labels = decoder_targets, + return_dict = True, + ) + + loss_lm = decoder_output.loss + return loss_ita, loss_itm, loss_lm + + + + @torch.no_grad() + def copy_params(self): + for model_pair in self.model_pairs: + for param, param_m in zip(model_pair[0].parameters(), model_pair[1].parameters()): + param_m.data.copy_(param.data) # initialize + param_m.requires_grad = False # not update by gradient + + + @torch.no_grad() + def _momentum_update(self): + for model_pair in self.model_pairs: + for param, param_m in zip(model_pair[0].parameters(), model_pair[1].parameters()): + param_m.data = param_m.data * self.momentum + param.data * (1. - self.momentum) + + + @torch.no_grad() + def _dequeue_and_enqueue(self, image_feat, text_feat): + # gather keys before updating queue + image_feats = concat_all_gather(image_feat) + text_feats = concat_all_gather(text_feat) + + batch_size = image_feats.shape[0] + + ptr = int(self.queue_ptr) + assert self.queue_size % batch_size == 0 # for simplicity + + # replace the keys at ptr (dequeue and enqueue) + self.image_queue[:, ptr:ptr + batch_size] = image_feats.T + self.text_queue[:, ptr:ptr + batch_size] = text_feats.T + ptr = (ptr + batch_size) % self.queue_size # move pointer + + self.queue_ptr[0] = ptr + + +def blip_pretrain(**kwargs): + model = BLIP_Pretrain(**kwargs) + return model + + +@torch.no_grad() +def concat_all_gather(tensor): + """ + Performs all_gather operation on the provided tensors. + *** Warning ***: torch.distributed.all_gather has no gradient. 
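+ Here that is harmless: the gathered features only feed _dequeue_and_enqueue,
+ which itself runs under @torch.no_grad().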
+ """ + tensors_gather = [torch.ones_like(tensor) + for _ in range(torch.distributed.get_world_size())] + torch.distributed.all_gather(tensors_gather, tensor, async_op=False) + + output = torch.cat(tensors_gather, dim=0) + return output + + +from typing import List +def tie_encoder_decoder_weights(encoder: nn.Module, decoder: nn.Module, base_model_prefix: str, skip_key:str): + uninitialized_encoder_weights: List[str] = [] + if decoder.__class__ != encoder.__class__: + print( + f"{decoder.__class__} and {encoder.__class__} are not equal. In this case make sure that all encoder weights are correctly initialized." + ) + + def tie_encoder_to_decoder_recursively( + decoder_pointer: nn.Module, + encoder_pointer: nn.Module, + module_name: str, + uninitialized_encoder_weights: List[str], + skip_key: str, + depth=0, + ): + assert isinstance(decoder_pointer, nn.Module) and isinstance( + encoder_pointer, nn.Module + ), f"{decoder_pointer} and {encoder_pointer} have to be of type torch.nn.Module" + if hasattr(decoder_pointer, "weight") and skip_key not in module_name: + assert hasattr(encoder_pointer, "weight") + encoder_pointer.weight = decoder_pointer.weight + if hasattr(decoder_pointer, "bias"): + assert hasattr(encoder_pointer, "bias") + encoder_pointer.bias = decoder_pointer.bias + print(module_name+' is tied') + return + + encoder_modules = encoder_pointer._modules + decoder_modules = decoder_pointer._modules + if len(decoder_modules) > 0: + assert ( + len(encoder_modules) > 0 + ), f"Encoder module {encoder_pointer} does not match decoder module {decoder_pointer}" + + all_encoder_weights = set([module_name + "/" + sub_name for sub_name in encoder_modules.keys()]) + encoder_layer_pos = 0 + for name, module in decoder_modules.items(): + if name.isdigit(): + encoder_name = str(int(name) + encoder_layer_pos) + decoder_name = name + if not isinstance(decoder_modules[decoder_name], type(encoder_modules[encoder_name])) and len( + encoder_modules + ) != len(decoder_modules): + # this can happen if the name corresponds to the position in a list module list of layers + # in this case the decoder has added a cross-attention that the encoder does not have + # thus skip this step and subtract one layer pos from encoder + encoder_layer_pos -= 1 + continue + elif name not in encoder_modules: + continue + elif depth > 500: + raise ValueError( + "Max depth of recursive function `tie_encoder_to_decoder` reached. It seems that there is a circular dependency between two or more `nn.Modules` of your model." 
+ ) + else: + decoder_name = encoder_name = name + tie_encoder_to_decoder_recursively( + decoder_modules[decoder_name], + encoder_modules[encoder_name], + module_name + "/" + name, + uninitialized_encoder_weights, + skip_key, + depth=depth + 1, + ) + all_encoder_weights.remove(module_name + "/" + encoder_name) + + uninitialized_encoder_weights += list(all_encoder_weights) + + # tie weights recursively + tie_encoder_to_decoder_recursively(decoder, encoder, base_model_prefix, uninitialized_encoder_weights, skip_key) diff --git a/extras/BLIP/models/blip_retrieval.py b/extras/BLIP/models/blip_retrieval.py new file mode 100644 index 0000000000000000000000000000000000000000..09493586d60db9807f2dbfd5f7ff0117bcc89dad --- /dev/null +++ b/extras/BLIP/models/blip_retrieval.py @@ -0,0 +1,319 @@ +from extras.BLIP.models.med import BertConfig, BertModel +from transformers import BertTokenizer + +import torch +from torch import nn +import torch.nn.functional as F + +from extras.BLIP.models.blip import create_vit, init_tokenizer, load_checkpoint + +class BLIP_Retrieval(nn.Module): + def __init__(self, + med_config = 'configs/med_config.json', + image_size = 384, + vit = 'base', + vit_grad_ckpt = False, + vit_ckpt_layer = 0, + embed_dim = 256, + queue_size = 57600, + momentum = 0.995, + negative_all_rank = False, + ): + """ + Args: + med_config (str): path for the mixture of encoder-decoder model's configuration file + image_size (int): input image size + vit (str): model size of vision transformer + """ + super().__init__() + + self.visual_encoder, vision_width = create_vit(vit,image_size, vit_grad_ckpt, vit_ckpt_layer) + self.tokenizer = init_tokenizer() + med_config = BertConfig.from_json_file(med_config) + med_config.encoder_width = vision_width + self.text_encoder = BertModel(config=med_config, add_pooling_layer=False) + + text_width = self.text_encoder.config.hidden_size + + self.vision_proj = nn.Linear(vision_width, embed_dim) + self.text_proj = nn.Linear(text_width, embed_dim) + + self.itm_head = nn.Linear(text_width, 2) + + # create momentum encoders + self.visual_encoder_m, vision_width = create_vit(vit,image_size) + self.vision_proj_m = nn.Linear(vision_width, embed_dim) + self.text_encoder_m = BertModel(config=med_config, add_pooling_layer=False) + self.text_proj_m = nn.Linear(text_width, embed_dim) + + self.model_pairs = [[self.visual_encoder,self.visual_encoder_m], + [self.vision_proj,self.vision_proj_m], + [self.text_encoder,self.text_encoder_m], + [self.text_proj,self.text_proj_m], + ] + self.copy_params() + + # create the queue + self.register_buffer("image_queue", torch.randn(embed_dim, queue_size)) + self.register_buffer("text_queue", torch.randn(embed_dim, queue_size)) + self.register_buffer("idx_queue", torch.full((1,queue_size),-100)) + self.register_buffer("ptr_queue", torch.zeros(1, dtype=torch.long)) + + self.image_queue = nn.functional.normalize(self.image_queue, dim=0) + self.text_queue = nn.functional.normalize(self.text_queue, dim=0) + + self.queue_size = queue_size + self.momentum = momentum + self.temp = nn.Parameter(0.07*torch.ones([])) + + self.negative_all_rank = negative_all_rank + + + def forward(self, image, caption, alpha, idx): + with torch.no_grad(): + self.temp.clamp_(0.001,0.5) + + image_embeds = self.visual_encoder(image) + image_atts = torch.ones(image_embeds.size()[:-1],dtype=torch.long).to(image.device) + image_feat = F.normalize(self.vision_proj(image_embeds[:,0,:]),dim=-1) + + text = self.tokenizer(caption, padding='max_length', truncation=True, 
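+ # retrieval pads every caption to a fixed 35 tokens
+ # (the pretraining model above truncates to 30)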
max_length=35, + return_tensors="pt").to(image.device) + + text_output = self.text_encoder(text.input_ids, attention_mask = text.attention_mask, + return_dict = True, mode = 'text') + text_feat = F.normalize(self.text_proj(text_output.last_hidden_state[:,0,:]),dim=-1) + + ###============== Image-text Contrastive Learning ===================### + idx = idx.view(-1,1) + idx_all = torch.cat([idx.t(), self.idx_queue.clone().detach()],dim=1) + pos_idx = torch.eq(idx, idx_all).float() + sim_targets = pos_idx / pos_idx.sum(1,keepdim=True) + + # get momentum features + with torch.no_grad(): + self._momentum_update() + image_embeds_m = self.visual_encoder_m(image) + image_feat_m = F.normalize(self.vision_proj_m(image_embeds_m[:,0,:]),dim=-1) + image_feat_m_all = torch.cat([image_feat_m.t(),self.image_queue.clone().detach()],dim=1) + + text_output_m = self.text_encoder_m(text.input_ids, attention_mask = text.attention_mask, + return_dict = True, mode = 'text') + text_feat_m = F.normalize(self.text_proj_m(text_output_m.last_hidden_state[:,0,:]),dim=-1) + text_feat_m_all = torch.cat([text_feat_m.t(),self.text_queue.clone().detach()],dim=1) + + sim_i2t_m = image_feat_m @ text_feat_m_all / self.temp + sim_t2i_m = text_feat_m @ image_feat_m_all / self.temp + + sim_i2t_targets = alpha * F.softmax(sim_i2t_m, dim=1) + (1 - alpha) * sim_targets + sim_t2i_targets = alpha * F.softmax(sim_t2i_m, dim=1) + (1 - alpha) * sim_targets + + sim_i2t = image_feat @ text_feat_m_all / self.temp + sim_t2i = text_feat @ image_feat_m_all / self.temp + + loss_i2t = -torch.sum(F.log_softmax(sim_i2t, dim=1)*sim_i2t_targets,dim=1).mean() + loss_t2i = -torch.sum(F.log_softmax(sim_t2i, dim=1)*sim_t2i_targets,dim=1).mean() + + loss_ita = (loss_i2t+loss_t2i)/2 + + idxs = concat_all_gather(idx) + self._dequeue_and_enqueue(image_feat_m, text_feat_m, idxs) + + ###============== Image-text Matching ===================### + encoder_input_ids = text.input_ids.clone() + encoder_input_ids[:,0] = self.tokenizer.enc_token_id + + # forward the positve image-text pair + bs = image.size(0) + output_pos = self.text_encoder(encoder_input_ids, + attention_mask = text.attention_mask, + encoder_hidden_states = image_embeds, + encoder_attention_mask = image_atts, + return_dict = True, + ) + + + if self.negative_all_rank: + # compute sample similarity + with torch.no_grad(): + mask = torch.eq(idx, idxs.t()) + + image_feat_world = concat_all_gather(image_feat) + text_feat_world = concat_all_gather(text_feat) + + sim_i2t = image_feat @ text_feat_world.t() / self.temp + sim_t2i = text_feat @ image_feat_world.t() / self.temp + + weights_i2t = F.softmax(sim_i2t,dim=1) + weights_i2t.masked_fill_(mask, 0) + + weights_t2i = F.softmax(sim_t2i,dim=1) + weights_t2i.masked_fill_(mask, 0) + + image_embeds_world = all_gather_with_grad(image_embeds) + + # select a negative image (from all ranks) for each text + image_embeds_neg = [] + for b in range(bs): + neg_idx = torch.multinomial(weights_t2i[b], 1).item() + image_embeds_neg.append(image_embeds_world[neg_idx]) + image_embeds_neg = torch.stack(image_embeds_neg,dim=0) + + # select a negative text (from all ranks) for each image + input_ids_world = concat_all_gather(encoder_input_ids) + att_mask_world = concat_all_gather(text.attention_mask) + + text_ids_neg = [] + text_atts_neg = [] + for b in range(bs): + neg_idx = torch.multinomial(weights_i2t[b], 1).item() + text_ids_neg.append(input_ids_world[neg_idx]) + text_atts_neg.append(att_mask_world[neg_idx]) + + else: + with torch.no_grad(): + mask = torch.eq(idx, 
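+ # mask[i][j] is True when captions i and j belong to the same image idx,
+ # so same-image pairs are never sampled as negatives below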
idx.t()) + + sim_i2t = image_feat @ text_feat.t() / self.temp + sim_t2i = text_feat @ image_feat.t() / self.temp + + weights_i2t = F.softmax(sim_i2t,dim=1) + weights_i2t.masked_fill_(mask, 0) + + weights_t2i = F.softmax(sim_t2i,dim=1) + weights_t2i.masked_fill_(mask, 0) + + # select a negative image (from same rank) for each text + image_embeds_neg = [] + for b in range(bs): + neg_idx = torch.multinomial(weights_t2i[b], 1).item() + image_embeds_neg.append(image_embeds[neg_idx]) + image_embeds_neg = torch.stack(image_embeds_neg,dim=0) + + # select a negative text (from same rank) for each image + text_ids_neg = [] + text_atts_neg = [] + for b in range(bs): + neg_idx = torch.multinomial(weights_i2t[b], 1).item() + text_ids_neg.append(encoder_input_ids[neg_idx]) + text_atts_neg.append(text.attention_mask[neg_idx]) + + text_ids_neg = torch.stack(text_ids_neg,dim=0) + text_atts_neg = torch.stack(text_atts_neg,dim=0) + + text_ids_all = torch.cat([encoder_input_ids, text_ids_neg],dim=0) + text_atts_all = torch.cat([text.attention_mask, text_atts_neg],dim=0) + + image_embeds_all = torch.cat([image_embeds_neg,image_embeds],dim=0) + image_atts_all = torch.cat([image_atts,image_atts],dim=0) + + output_neg = self.text_encoder(text_ids_all, + attention_mask = text_atts_all, + encoder_hidden_states = image_embeds_all, + encoder_attention_mask = image_atts_all, + return_dict = True, + ) + + + vl_embeddings = torch.cat([output_pos.last_hidden_state[:,0,:], output_neg.last_hidden_state[:,0,:]],dim=0) + vl_output = self.itm_head(vl_embeddings) + + itm_labels = torch.cat([torch.ones(bs,dtype=torch.long),torch.zeros(2*bs,dtype=torch.long)], + dim=0).to(image.device) + loss_itm = F.cross_entropy(vl_output, itm_labels) + + return loss_ita, loss_itm + + + @torch.no_grad() + def copy_params(self): + for model_pair in self.model_pairs: + for param, param_m in zip(model_pair[0].parameters(), model_pair[1].parameters()): + param_m.data.copy_(param.data) # initialize + param_m.requires_grad = False # not update by gradient + + + @torch.no_grad() + def _momentum_update(self): + for model_pair in self.model_pairs: + for param, param_m in zip(model_pair[0].parameters(), model_pair[1].parameters()): + param_m.data = param_m.data * self.momentum + param.data * (1. - self.momentum) + + + @torch.no_grad() + def _dequeue_and_enqueue(self, image_feat, text_feat, idxs): + # gather keys before updating queue + image_feats = concat_all_gather(image_feat) + text_feats = concat_all_gather(text_feat) + + + batch_size = image_feats.shape[0] + + ptr = int(self.ptr_queue) + assert self.queue_size % batch_size == 0 # for simplicity + + # replace the keys at ptr (dequeue and enqueue) + self.image_queue[:, ptr:ptr + batch_size] = image_feats.T + self.text_queue[:, ptr:ptr + batch_size] = text_feats.T + self.idx_queue[:, ptr:ptr + batch_size] = idxs.T + ptr = (ptr + batch_size) % self.queue_size # move pointer + + self.ptr_queue[0] = ptr + + +def blip_retrieval(pretrained='',**kwargs): + model = BLIP_Retrieval(**kwargs) + if pretrained: + model,msg = load_checkpoint(model,pretrained) + print("missing keys:") + print(msg.missing_keys) + return model + + +@torch.no_grad() +def concat_all_gather(tensor): + """ + Performs all_gather operation on the provided tensors. + *** Warning ***: torch.distributed.all_gather has no gradient. 
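+ When the gathered tensor must stay in the autograd graph (e.g. image_embeds
+ under negative_all_rank), use all_gather_with_grad below instead.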
+ """ + tensors_gather = [torch.ones_like(tensor) + for _ in range(torch.distributed.get_world_size())] + torch.distributed.all_gather(tensors_gather, tensor, async_op=False) + + output = torch.cat(tensors_gather, dim=0) + return output + + +class GatherLayer(torch.autograd.Function): + """ + Gather tensors from all workers with support for backward propagation: + This implementation does not cut the gradients as torch.distributed.all_gather does. + """ + + @staticmethod + def forward(ctx, x): + output = [torch.zeros_like(x) for _ in range(torch.distributed.get_world_size())] + torch.distributed.all_gather(output, x) + return tuple(output) + + @staticmethod + def backward(ctx, *grads): + all_gradients = torch.stack(grads) + torch.distributed.all_reduce(all_gradients) + return all_gradients[torch.distributed.get_rank()] + + +def all_gather_with_grad(tensors): + """ + Performs all_gather operation on the provided tensors. + Graph remains connected for backward grad computation. + """ + # Queue the gathered tensors + world_size = torch.distributed.get_world_size() + # There is no need for reduction in the single-proc case + if world_size == 1: + return tensors + + tensor_all = GatherLayer.apply(tensors) + + return torch.cat(tensor_all, dim=0) diff --git a/extras/BLIP/models/blip_vqa.py b/extras/BLIP/models/blip_vqa.py new file mode 100644 index 0000000000000000000000000000000000000000..99928a8b50a4521e2f8ea074ee8dafd197e6fcc4 --- /dev/null +++ b/extras/BLIP/models/blip_vqa.py @@ -0,0 +1,186 @@ +from extras.BLIP.models.med import BertConfig, BertModel, BertLMHeadModel +from extras.BLIP.models.blip import create_vit, init_tokenizer, load_checkpoint + +import torch +from torch import nn +import torch.nn.functional as F +from transformers import BertTokenizer +import numpy as np + +class BLIP_VQA(nn.Module): + def __init__(self, + med_config = 'configs/med_config.json', + image_size = 480, + vit = 'base', + vit_grad_ckpt = False, + vit_ckpt_layer = 0, + ): + """ + Args: + med_config (str): path for the mixture of encoder-decoder model's configuration file + image_size (int): input image size + vit (str): model size of vision transformer + """ + super().__init__() + + self.visual_encoder, vision_width = create_vit(vit, image_size, vit_grad_ckpt, vit_ckpt_layer, drop_path_rate=0.1) + self.tokenizer = init_tokenizer() + + encoder_config = BertConfig.from_json_file(med_config) + encoder_config.encoder_width = vision_width + self.text_encoder = BertModel(config=encoder_config, add_pooling_layer=False) + + decoder_config = BertConfig.from_json_file(med_config) + self.text_decoder = BertLMHeadModel(config=decoder_config) + + + def forward(self, image, question, answer=None, n=None, weights=None, train=True, inference='rank', k_test=128): + + image_embeds = self.visual_encoder(image) + image_atts = torch.ones(image_embeds.size()[:-1],dtype=torch.long).to(image.device) + + question = self.tokenizer(question, padding='longest', truncation=True, max_length=35, + return_tensors="pt").to(image.device) + question.input_ids[:,0] = self.tokenizer.enc_token_id + + if train: + ''' + n: number of answers for each question + weights: weight for each answer + ''' + answer = self.tokenizer(answer, padding='longest', return_tensors="pt").to(image.device) + answer.input_ids[:,0] = self.tokenizer.bos_token_id + answer_targets = answer.input_ids.masked_fill(answer.input_ids == self.tokenizer.pad_token_id, -100) + + question_output = self.text_encoder(question.input_ids, + attention_mask = question.attention_mask, + 
encoder_hidden_states = image_embeds, + encoder_attention_mask = image_atts, + return_dict = True) + + question_states = [] + question_atts = [] + for b, n in enumerate(n): + question_states += [question_output.last_hidden_state[b]]*n + question_atts += [question.attention_mask[b]]*n + question_states = torch.stack(question_states,0) + question_atts = torch.stack(question_atts,0) + + answer_output = self.text_decoder(answer.input_ids, + attention_mask = answer.attention_mask, + encoder_hidden_states = question_states, + encoder_attention_mask = question_atts, + labels = answer_targets, + return_dict = True, + reduction = 'none', + ) + + loss = weights * answer_output.loss + loss = loss.sum()/image.size(0) + + return loss + + + else: + question_output = self.text_encoder(question.input_ids, + attention_mask = question.attention_mask, + encoder_hidden_states = image_embeds, + encoder_attention_mask = image_atts, + return_dict = True) + + if inference=='generate': + num_beams = 3 + question_states = question_output.last_hidden_state.repeat_interleave(num_beams,dim=0) + question_atts = torch.ones(question_states.size()[:-1],dtype=torch.long).to(question_states.device) + model_kwargs = {"encoder_hidden_states": question_states, "encoder_attention_mask":question_atts} + + bos_ids = torch.full((image.size(0),1),fill_value=self.tokenizer.bos_token_id,device=image.device) + + outputs = self.text_decoder.generate(input_ids=bos_ids, + max_length=10, + min_length=1, + num_beams=num_beams, + eos_token_id=self.tokenizer.sep_token_id, + pad_token_id=self.tokenizer.pad_token_id, + **model_kwargs) + + answers = [] + for output in outputs: + answer = self.tokenizer.decode(output, skip_special_tokens=True) + answers.append(answer) + return answers + + elif inference=='rank': + max_ids = self.rank_answer(question_output.last_hidden_state, question.attention_mask, + answer.input_ids, answer.attention_mask, k_test) + return max_ids + + + + def rank_answer(self, question_states, question_atts, answer_ids, answer_atts, k): + + num_ques = question_states.size(0) + start_ids = answer_ids[0,0].repeat(num_ques,1) # bos token + + start_output = self.text_decoder(start_ids, + encoder_hidden_states = question_states, + encoder_attention_mask = question_atts, + return_dict = True, + reduction = 'none') + logits = start_output.logits[:,0,:] # first token's logit + + # topk_probs: top-k probability + # topk_ids: [num_question, k] + answer_first_token = answer_ids[:,1] + prob_first_token = F.softmax(logits,dim=1).index_select(dim=1, index=answer_first_token) + topk_probs, topk_ids = prob_first_token.topk(k,dim=1) + + # answer input: [num_question*k, answer_len] + input_ids = [] + input_atts = [] + for b, topk_id in enumerate(topk_ids): + input_ids.append(answer_ids.index_select(dim=0, index=topk_id)) + input_atts.append(answer_atts.index_select(dim=0, index=topk_id)) + input_ids = torch.cat(input_ids,dim=0) + input_atts = torch.cat(input_atts,dim=0) + + targets_ids = input_ids.masked_fill(input_ids == self.tokenizer.pad_token_id, -100) + + # repeat encoder's output for top-k answers + question_states = tile(question_states, 0, k) + question_atts = tile(question_atts, 0, k) + + output = self.text_decoder(input_ids, + attention_mask = input_atts, + encoder_hidden_states = question_states, + encoder_attention_mask = question_atts, + labels = targets_ids, + return_dict = True, + reduction = 'none') + + log_probs_sum = -output.loss + log_probs_sum = log_probs_sum.view(num_ques,k) + + max_topk_ids = log_probs_sum.argmax(dim=1) + 
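+ # `max_topk_ids >= 0` is an all-True mask, so this fancy index is a per-row
+ # gather: max_ids[i] = topk_ids[i, max_topk_ids[i]], i.e. the candidate answer
+ # with the highest total decoder log-likelihood for question i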
max_ids = topk_ids[max_topk_ids>=0,max_topk_ids] + + return max_ids + + +def blip_vqa(pretrained='',**kwargs): + model = BLIP_VQA(**kwargs) + if pretrained: + model,msg = load_checkpoint(model,pretrained) +# assert(len(msg.missing_keys)==0) + return model + + +def tile(x, dim, n_tile): + init_dim = x.size(dim) + repeat_idx = [1] * x.dim() + repeat_idx[dim] = n_tile + x = x.repeat(*(repeat_idx)) + order_index = torch.LongTensor(np.concatenate([init_dim * np.arange(n_tile) + i for i in range(init_dim)])) + return torch.index_select(x, dim, order_index.to(x.device)) + + \ No newline at end of file diff --git a/extras/BLIP/models/med.py b/extras/BLIP/models/med.py new file mode 100644 index 0000000000000000000000000000000000000000..7b00a35450b736180a805d4f4664b4fb95aeba01 --- /dev/null +++ b/extras/BLIP/models/med.py @@ -0,0 +1,955 @@ +''' + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. + * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + * By Junnan Li + * Based on huggingface code base + * https://github.com/huggingface/transformers/blob/v4.15.0/src/transformers/models/bert +''' + +import math +import os +import warnings +from dataclasses import dataclass +from typing import Optional, Tuple + +import torch +from torch import Tensor, device, dtype, nn +import torch.utils.checkpoint +from torch import nn +from torch.nn import CrossEntropyLoss +import torch.nn.functional as F + +from transformers.activations import ACT2FN +from transformers.file_utils import ( + ModelOutput, +) +from transformers.modeling_outputs import ( + BaseModelOutputWithPastAndCrossAttentions, + BaseModelOutputWithPoolingAndCrossAttentions, + CausalLMOutputWithCrossAttentions, + MaskedLMOutput, + MultipleChoiceModelOutput, + NextSentencePredictorOutput, + QuestionAnsweringModelOutput, + SequenceClassifierOutput, + TokenClassifierOutput, +) +from transformers.modeling_utils import ( + PreTrainedModel, + apply_chunking_to_forward, + find_pruneable_heads_and_indices, + prune_linear_layer, +) +from transformers.utils import logging +from transformers.models.bert.configuration_bert import BertConfig + + +logger = logging.get_logger(__name__) + + +class BertEmbeddings(nn.Module): + """Construct the embeddings from word and position embeddings.""" + + def __init__(self, config): + super().__init__() + self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id) + self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size) + + # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load + # any TensorFlow checkpoint file + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + # position_ids (1, len position emb) is contiguous in memory and exported when serialized + self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1))) + self.position_embedding_type = getattr(config, "position_embedding_type", "absolute") + + self.config = config + + def forward( + self, input_ids=None, position_ids=None, inputs_embeds=None, past_key_values_length=0 + ): + if input_ids is not None: + input_shape = input_ids.size() + else: + input_shape = inputs_embeds.size()[:-1] + + seq_length = input_shape[1] + + if position_ids is None: + position_ids = self.position_ids[:, 
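+ # when decoding with a key/value cache only the new tokens need
+ # embeddings, so positions start at past_key_values_length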
past_key_values_length : seq_length + past_key_values_length] + + if inputs_embeds is None: + inputs_embeds = self.word_embeddings(input_ids) + + embeddings = inputs_embeds + + if self.position_embedding_type == "absolute": + position_embeddings = self.position_embeddings(position_ids) + embeddings += position_embeddings + embeddings = self.LayerNorm(embeddings) + embeddings = self.dropout(embeddings) + return embeddings + + +class BertSelfAttention(nn.Module): + def __init__(self, config, is_cross_attention): + super().__init__() + self.config = config + if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"): + raise ValueError( + "The hidden size (%d) is not a multiple of the number of attention " + "heads (%d)" % (config.hidden_size, config.num_attention_heads) + ) + + self.num_attention_heads = config.num_attention_heads + self.attention_head_size = int(config.hidden_size / config.num_attention_heads) + self.all_head_size = self.num_attention_heads * self.attention_head_size + + self.query = nn.Linear(config.hidden_size, self.all_head_size) + if is_cross_attention: + self.key = nn.Linear(config.encoder_width, self.all_head_size) + self.value = nn.Linear(config.encoder_width, self.all_head_size) + else: + self.key = nn.Linear(config.hidden_size, self.all_head_size) + self.value = nn.Linear(config.hidden_size, self.all_head_size) + + self.dropout = nn.Dropout(config.attention_probs_dropout_prob) + self.position_embedding_type = getattr(config, "position_embedding_type", "absolute") + if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query": + self.max_position_embeddings = config.max_position_embeddings + self.distance_embedding = nn.Embedding(2 * config.max_position_embeddings - 1, self.attention_head_size) + self.save_attention = False + + def save_attn_gradients(self, attn_gradients): + self.attn_gradients = attn_gradients + + def get_attn_gradients(self): + return self.attn_gradients + + def save_attention_map(self, attention_map): + self.attention_map = attention_map + + def get_attention_map(self): + return self.attention_map + + def transpose_for_scores(self, x): + new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size) + x = x.view(*new_x_shape) + return x.permute(0, 2, 1, 3) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + ): + mixed_query_layer = self.query(hidden_states) + + # If this is instantiated as a cross-attention module, the keys + # and values come from an encoder; the attention mask needs to be + # such that the encoder's padding tokens are not attended to. 
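+ # Note the key/value projections for cross-attention were built with input
+ # width config.encoder_width (the vision width) rather than hidden_size;
+ # queries always come from the text hidden states.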
+ is_cross_attention = encoder_hidden_states is not None + + if is_cross_attention: + key_layer = self.transpose_for_scores(self.key(encoder_hidden_states)) + value_layer = self.transpose_for_scores(self.value(encoder_hidden_states)) + attention_mask = encoder_attention_mask + elif past_key_value is not None: + key_layer = self.transpose_for_scores(self.key(hidden_states)) + value_layer = self.transpose_for_scores(self.value(hidden_states)) + key_layer = torch.cat([past_key_value[0], key_layer], dim=2) + value_layer = torch.cat([past_key_value[1], value_layer], dim=2) + else: + key_layer = self.transpose_for_scores(self.key(hidden_states)) + value_layer = self.transpose_for_scores(self.value(hidden_states)) + + query_layer = self.transpose_for_scores(mixed_query_layer) + + past_key_value = (key_layer, value_layer) + + # Take the dot product between "query" and "key" to get the raw attention scores. + attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2)) + + if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query": + seq_length = hidden_states.size()[1] + position_ids_l = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(-1, 1) + position_ids_r = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(1, -1) + distance = position_ids_l - position_ids_r + positional_embedding = self.distance_embedding(distance + self.max_position_embeddings - 1) + positional_embedding = positional_embedding.to(dtype=query_layer.dtype) # fp16 compatibility + + if self.position_embedding_type == "relative_key": + relative_position_scores = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding) + attention_scores = attention_scores + relative_position_scores + elif self.position_embedding_type == "relative_key_query": + relative_position_scores_query = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding) + relative_position_scores_key = torch.einsum("bhrd,lrd->bhlr", key_layer, positional_embedding) + attention_scores = attention_scores + relative_position_scores_query + relative_position_scores_key + + attention_scores = attention_scores / math.sqrt(self.attention_head_size) + if attention_mask is not None: + # Apply the attention mask is (precomputed for all layers in BertModel forward() function) + attention_scores = attention_scores + attention_mask + + # Normalize the attention scores to probabilities. + attention_probs = nn.Softmax(dim=-1)(attention_scores) + + if is_cross_attention and self.save_attention: + self.save_attention_map(attention_probs) + attention_probs.register_hook(self.save_attn_gradients) + + # This is actually dropping out entire tokens to attend to, which might + # seem a bit unusual, but is taken from the original Transformer paper. 
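+ # the undropped attention_probs is what gets returned when
+ # output_attentions=True; only the value aggregation sees the dropout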
+ attention_probs_dropped = self.dropout(attention_probs) + + # Mask heads if we want to + if head_mask is not None: + attention_probs_dropped = attention_probs_dropped * head_mask + + context_layer = torch.matmul(attention_probs_dropped, value_layer) + + context_layer = context_layer.permute(0, 2, 1, 3).contiguous() + new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,) + context_layer = context_layer.view(*new_context_layer_shape) + + outputs = (context_layer, attention_probs) if output_attentions else (context_layer,) + + outputs = outputs + (past_key_value,) + return outputs + + +class BertSelfOutput(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.hidden_size) + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + def forward(self, hidden_states, input_tensor): + hidden_states = self.dense(hidden_states) + hidden_states = self.dropout(hidden_states) + hidden_states = self.LayerNorm(hidden_states + input_tensor) + return hidden_states + + +class BertAttention(nn.Module): + def __init__(self, config, is_cross_attention=False): + super().__init__() + self.self = BertSelfAttention(config, is_cross_attention) + self.output = BertSelfOutput(config) + self.pruned_heads = set() + + def prune_heads(self, heads): + if len(heads) == 0: + return + heads, index = find_pruneable_heads_and_indices( + heads, self.self.num_attention_heads, self.self.attention_head_size, self.pruned_heads + ) + + # Prune linear layers + self.self.query = prune_linear_layer(self.self.query, index) + self.self.key = prune_linear_layer(self.self.key, index) + self.self.value = prune_linear_layer(self.self.value, index) + self.output.dense = prune_linear_layer(self.output.dense, index, dim=1) + + # Update hyper params and store pruned heads + self.self.num_attention_heads = self.self.num_attention_heads - len(heads) + self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads + self.pruned_heads = self.pruned_heads.union(heads) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + ): + self_outputs = self.self( + hidden_states, + attention_mask, + head_mask, + encoder_hidden_states, + encoder_attention_mask, + past_key_value, + output_attentions, + ) + attention_output = self.output(self_outputs[0], hidden_states) + outputs = (attention_output,) + self_outputs[1:] # add attentions if we output them + return outputs + + +class BertIntermediate(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.intermediate_size) + if isinstance(config.hidden_act, str): + self.intermediate_act_fn = ACT2FN[config.hidden_act] + else: + self.intermediate_act_fn = config.hidden_act + + def forward(self, hidden_states): + hidden_states = self.dense(hidden_states) + hidden_states = self.intermediate_act_fn(hidden_states) + return hidden_states + + +class BertOutput(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.intermediate_size, config.hidden_size) + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + def forward(self, hidden_states, input_tensor): + hidden_states = self.dense(hidden_states) + hidden_states = 
self.dropout(hidden_states) + hidden_states = self.LayerNorm(hidden_states + input_tensor) + return hidden_states + + +class BertLayer(nn.Module): + def __init__(self, config, layer_num): + super().__init__() + self.config = config + self.chunk_size_feed_forward = config.chunk_size_feed_forward + self.seq_len_dim = 1 + self.attention = BertAttention(config) + self.layer_num = layer_num + if self.config.add_cross_attention: + self.crossattention = BertAttention(config, is_cross_attention=self.config.add_cross_attention) + self.intermediate = BertIntermediate(config) + self.output = BertOutput(config) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + mode=None, + ): + # decoder uni-directional self-attention cached key/values tuple is at positions 1,2 + self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None + self_attention_outputs = self.attention( + hidden_states, + attention_mask, + head_mask, + output_attentions=output_attentions, + past_key_value=self_attn_past_key_value, + ) + attention_output = self_attention_outputs[0] + + outputs = self_attention_outputs[1:-1] + present_key_value = self_attention_outputs[-1] + + if mode=='multimodal': + assert encoder_hidden_states is not None, "encoder_hidden_states must be given for cross-attention layers" + + cross_attention_outputs = self.crossattention( + attention_output, + attention_mask, + head_mask, + encoder_hidden_states, + encoder_attention_mask, + output_attentions=output_attentions, + ) + attention_output = cross_attention_outputs[0] + outputs = outputs + cross_attention_outputs[1:-1] # add cross attentions if we output attention weights + layer_output = apply_chunking_to_forward( + self.feed_forward_chunk, self.chunk_size_feed_forward, self.seq_len_dim, attention_output + ) + outputs = (layer_output,) + outputs + + outputs = outputs + (present_key_value,) + + return outputs + + def feed_forward_chunk(self, attention_output): + intermediate_output = self.intermediate(attention_output) + layer_output = self.output(intermediate_output, attention_output) + return layer_output + + +class BertEncoder(nn.Module): + def __init__(self, config): + super().__init__() + self.config = config + self.layer = nn.ModuleList([BertLayer(config,i) for i in range(config.num_hidden_layers)]) + self.gradient_checkpointing = False + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_values=None, + use_cache=None, + output_attentions=False, + output_hidden_states=False, + return_dict=True, + mode='multimodal', + ): + all_hidden_states = () if output_hidden_states else None + all_self_attentions = () if output_attentions else None + all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None + + next_decoder_cache = () if use_cache else None + + for i in range(self.config.num_hidden_layers): + layer_module = self.layer[i] + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + layer_head_mask = head_mask[i] if head_mask is not None else None + past_key_value = past_key_values[i] if past_key_values is not None else None + + if self.gradient_checkpointing and self.training: + + if use_cache: + logger.warn( + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." 
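+ # checkpointing recomputes activations during backward to save memory,
+ # which cannot coexist with caching key/value states across calls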
+ ) + use_cache = False + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs, past_key_value, output_attentions) + + return custom_forward + + layer_outputs = torch.utils.checkpoint.checkpoint( + create_custom_forward(layer_module), + hidden_states, + attention_mask, + layer_head_mask, + encoder_hidden_states, + encoder_attention_mask, + mode=mode, + ) + else: + layer_outputs = layer_module( + hidden_states, + attention_mask, + layer_head_mask, + encoder_hidden_states, + encoder_attention_mask, + past_key_value, + output_attentions, + mode=mode, + ) + + hidden_states = layer_outputs[0] + if use_cache: + next_decoder_cache += (layer_outputs[-1],) + if output_attentions: + all_self_attentions = all_self_attentions + (layer_outputs[1],) + + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + if not return_dict: + return tuple( + v + for v in [ + hidden_states, + next_decoder_cache, + all_hidden_states, + all_self_attentions, + all_cross_attentions, + ] + if v is not None + ) + return BaseModelOutputWithPastAndCrossAttentions( + last_hidden_state=hidden_states, + past_key_values=next_decoder_cache, + hidden_states=all_hidden_states, + attentions=all_self_attentions, + cross_attentions=all_cross_attentions, + ) + + +class BertPooler(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.hidden_size) + self.activation = nn.Tanh() + + def forward(self, hidden_states): + # We "pool" the model by simply taking the hidden state corresponding + # to the first token. + first_token_tensor = hidden_states[:, 0] + pooled_output = self.dense(first_token_tensor) + pooled_output = self.activation(pooled_output) + return pooled_output + + +class BertPredictionHeadTransform(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.hidden_size) + if isinstance(config.hidden_act, str): + self.transform_act_fn = ACT2FN[config.hidden_act] + else: + self.transform_act_fn = config.hidden_act + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + + def forward(self, hidden_states): + hidden_states = self.dense(hidden_states) + hidden_states = self.transform_act_fn(hidden_states) + hidden_states = self.LayerNorm(hidden_states) + return hidden_states + + +class BertLMPredictionHead(nn.Module): + def __init__(self, config): + super().__init__() + self.transform = BertPredictionHeadTransform(config) + + # The output weights are the same as the input embeddings, but there is + # an output-only bias for each token. + self.decoder = nn.Linear(config.hidden_size, config.vocab_size, bias=False) + + self.bias = nn.Parameter(torch.zeros(config.vocab_size)) + + # Need a link between the two variables so that the bias is correctly resized with `resize_token_embeddings` + self.decoder.bias = self.bias + + def forward(self, hidden_states): + hidden_states = self.transform(hidden_states) + hidden_states = self.decoder(hidden_states) + return hidden_states + + +class BertOnlyMLMHead(nn.Module): + def __init__(self, config): + super().__init__() + self.predictions = BertLMPredictionHead(config) + + def forward(self, sequence_output): + prediction_scores = self.predictions(sequence_output) + return prediction_scores + + +class BertPreTrainedModel(PreTrainedModel): + """ + An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained + models. 
+ """ + + config_class = BertConfig + base_model_prefix = "bert" + _keys_to_ignore_on_load_missing = [r"position_ids"] + + def _init_weights(self, module): + """ Initialize the weights """ + if isinstance(module, (nn.Linear, nn.Embedding)): + # Slightly different from the TF version which uses truncated_normal for initialization + # cf https://github.com/pytorch/pytorch/pull/5617 + module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) + elif isinstance(module, nn.LayerNorm): + module.bias.data.zero_() + module.weight.data.fill_(1.0) + if isinstance(module, nn.Linear) and module.bias is not None: + module.bias.data.zero_() + + +class BertModel(BertPreTrainedModel): + """ + The model can behave as an encoder (with only self-attention) as well as a decoder, in which case a layer of + cross-attention is added between the self-attention layers, following the architecture described in `Attention is + all you need `__ by Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit, + Llion Jones, Aidan N. Gomez, Lukasz Kaiser and Illia Polosukhin. + argument and :obj:`add_cross_attention` set to :obj:`True`; an :obj:`encoder_hidden_states` is then expected as an + input to the forward pass. + """ + + def __init__(self, config, add_pooling_layer=True): + super().__init__(config) + self.config = config + + self.embeddings = BertEmbeddings(config) + + self.encoder = BertEncoder(config) + + self.pooler = BertPooler(config) if add_pooling_layer else None + + self.init_weights() + + + def get_input_embeddings(self): + return self.embeddings.word_embeddings + + def set_input_embeddings(self, value): + self.embeddings.word_embeddings = value + + def _prune_heads(self, heads_to_prune): + """ + Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base + class PreTrainedModel + """ + for layer, heads in heads_to_prune.items(): + self.encoder.layer[layer].attention.prune_heads(heads) + + + def get_extended_attention_mask(self, attention_mask: Tensor, input_shape: Tuple[int], device: device, is_decoder: bool) -> Tensor: + """ + Makes broadcastable attention and causal masks so that future and masked tokens are ignored. + + Arguments: + attention_mask (:obj:`torch.Tensor`): + Mask with ones indicating tokens to attend to, zeros for tokens to ignore. + input_shape (:obj:`Tuple[int]`): + The shape of the input to the model. + device: (:obj:`torch.device`): + The device of the input to the model. + + Returns: + :obj:`torch.Tensor` The extended attention mask, with a the same dtype as :obj:`attention_mask.dtype`. + """ + # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length] + # ourselves in which case we just need to make it broadcastable to all heads. 
+ if attention_mask.dim() == 3: + extended_attention_mask = attention_mask[:, None, :, :] + elif attention_mask.dim() == 2: + # Provided a padding mask of dimensions [batch_size, seq_length] + # - if the model is a decoder, apply a causal mask in addition to the padding mask + # - if the model is an encoder, make the mask broadcastable to [batch_size, num_heads, seq_length, seq_length] + if is_decoder: + batch_size, seq_length = input_shape + + seq_ids = torch.arange(seq_length, device=device) + causal_mask = seq_ids[None, None, :].repeat(batch_size, seq_length, 1) <= seq_ids[None, :, None] + # in case past_key_values are used we need to add a prefix ones mask to the causal mask + # causal and attention masks must have same type with pytorch version < 1.3 + causal_mask = causal_mask.to(attention_mask.dtype) + + if causal_mask.shape[1] < attention_mask.shape[1]: + prefix_seq_len = attention_mask.shape[1] - causal_mask.shape[1] + causal_mask = torch.cat( + [ + torch.ones((batch_size, seq_length, prefix_seq_len), device=device, dtype=causal_mask.dtype), + causal_mask, + ], + axis=-1, + ) + + extended_attention_mask = causal_mask[:, None, :, :] * attention_mask[:, None, None, :] + else: + extended_attention_mask = attention_mask[:, None, None, :] + else: + raise ValueError( + "Wrong shape for input_ids (shape {}) or attention_mask (shape {})".format( + input_shape, attention_mask.shape + ) + ) + + # Since attention_mask is 1.0 for positions we want to attend and 0.0 for + # masked positions, this operation will create a tensor which is 0.0 for + # positions we want to attend and -10000.0 for masked positions. + # Since we are adding it to the raw scores before the softmax, this is + # effectively the same as removing these entirely. + extended_attention_mask = extended_attention_mask.to(dtype=self.dtype) # fp16 compatibility + extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0 + return extended_attention_mask + + def forward( + self, + input_ids=None, + attention_mask=None, + position_ids=None, + head_mask=None, + inputs_embeds=None, + encoder_embeds=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_values=None, + use_cache=None, + output_attentions=None, + output_hidden_states=None, + return_dict=None, + is_decoder=False, + mode='multimodal', + ): + r""" + encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): + Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if + the model is configured as a decoder. + encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): + Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in + the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``: + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): + Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding. 
+ If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids` + (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)` + instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`. + use_cache (:obj:`bool`, `optional`): + If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up + decoding (see :obj:`past_key_values`). + """ + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + if is_decoder: + use_cache = use_cache if use_cache is not None else self.config.use_cache + else: + use_cache = False + + if input_ids is not None and inputs_embeds is not None: + raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") + elif input_ids is not None: + input_shape = input_ids.size() + batch_size, seq_length = input_shape + device = input_ids.device + elif inputs_embeds is not None: + input_shape = inputs_embeds.size()[:-1] + batch_size, seq_length = input_shape + device = inputs_embeds.device + elif encoder_embeds is not None: + input_shape = encoder_embeds.size()[:-1] + batch_size, seq_length = input_shape + device = encoder_embeds.device + else: + raise ValueError("You have to specify either input_ids or inputs_embeds or encoder_embeds") + + # past_key_values_length + past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0 + + if attention_mask is None: + attention_mask = torch.ones(((batch_size, seq_length + past_key_values_length)), device=device) + + # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length] + # ourselves in which case we just need to make it broadcastable to all heads. 
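+ # shapes: a [batch, seq] padding mask becomes [batch, 1, 1, seq] for encoders,
+ # or [batch, 1, seq, seq] with causality when is_decoder=True; the helper then
+ # maps 1 -> 0.0 and 0 -> -10000.0 so masking is a simple addition to the logits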
+ extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(attention_mask, input_shape, + device, is_decoder) + + # If a 2D or 3D attention mask is provided for the cross-attention + # we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length] + if encoder_hidden_states is not None: + if type(encoder_hidden_states) == list: + encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states[0].size() + else: + encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size() + encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length) + + if type(encoder_attention_mask) == list: + encoder_extended_attention_mask = [self.invert_attention_mask(mask) for mask in encoder_attention_mask] + elif encoder_attention_mask is None: + encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device) + encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) + else: + encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) + else: + encoder_extended_attention_mask = None + + # Prepare head mask if needed + # 1.0 in head_mask indicate we keep the head + # attention_probs has shape bsz x n_heads x N x N + # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads] + # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length] + head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) + + if encoder_embeds is None: + embedding_output = self.embeddings( + input_ids=input_ids, + position_ids=position_ids, + inputs_embeds=inputs_embeds, + past_key_values_length=past_key_values_length, + ) + else: + embedding_output = encoder_embeds + + encoder_outputs = self.encoder( + embedding_output, + attention_mask=extended_attention_mask, + head_mask=head_mask, + encoder_hidden_states=encoder_hidden_states, + encoder_attention_mask=encoder_extended_attention_mask, + past_key_values=past_key_values, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + mode=mode, + ) + sequence_output = encoder_outputs[0] + pooled_output = self.pooler(sequence_output) if self.pooler is not None else None + + if not return_dict: + return (sequence_output, pooled_output) + encoder_outputs[1:] + + return BaseModelOutputWithPoolingAndCrossAttentions( + last_hidden_state=sequence_output, + pooler_output=pooled_output, + past_key_values=encoder_outputs.past_key_values, + hidden_states=encoder_outputs.hidden_states, + attentions=encoder_outputs.attentions, + cross_attentions=encoder_outputs.cross_attentions, + ) + + + +class BertLMHeadModel(BertPreTrainedModel): + + _keys_to_ignore_on_load_unexpected = [r"pooler"] + _keys_to_ignore_on_load_missing = [r"position_ids", r"predictions.decoder.bias"] + + def __init__(self, config): + super().__init__(config) + + self.bert = BertModel(config, add_pooling_layer=False) + self.cls = BertOnlyMLMHead(config) + + self.init_weights() + + def get_output_embeddings(self): + return self.cls.predictions.decoder + + def set_output_embeddings(self, new_embeddings): + self.cls.predictions.decoder = new_embeddings + + def forward( + self, + input_ids=None, + attention_mask=None, + position_ids=None, + head_mask=None, + inputs_embeds=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + labels=None, + past_key_values=None, + use_cache=None, + output_attentions=None, + output_hidden_states=None, + return_dict=None, + 
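+ # arguments added on top of the stock HF signature: return_logits short-circuits
+ # to the shifted next-token logits, reduction='none' yields a per-sample LM loss
+ # (consumed by BLIP-VQA's rank_answer), and mode selects text-only vs.
+ # multimodal behaviour in the underlying BertModel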
+ return_logits=False,
+ is_decoder=True,
+ reduction='mean',
+ mode='multimodal',
+ ):
+ r"""
+ encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
+ Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if
+ the model is configured as a decoder.
+ encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
+ Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in
+ the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``:
+ - 1 for tokens that are **not masked**,
+ - 0 for tokens that are **masked**.
+ labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
+ Labels for computing the left-to-right language modeling loss (next word prediction). Indices should be in
+ ``[-100, 0, ..., config.vocab_size]`` (see ``input_ids`` docstring). Tokens with indices set to ``-100`` are
+ ignored (masked); the loss is only computed for the tokens with labels in ``[0, ..., config.vocab_size]``
+ past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
+ Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.
+ If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids`
+ (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)`
+ instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`.
+ use_cache (:obj:`bool`, `optional`):
+ If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up
+ decoding (see :obj:`past_key_values`).
+ Returns: + Example:: + >>> from transformers import BertTokenizer, BertLMHeadModel, BertConfig + >>> import torch + >>> tokenizer = BertTokenizer.from_pretrained('bert-base-cased') + >>> config = BertConfig.from_pretrained("bert-base-cased") + >>> model = BertLMHeadModel.from_pretrained('bert-base-cased', config=config) + >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt") + >>> outputs = model(**inputs) + >>> prediction_logits = outputs.logits + """ + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + if labels is not None: + use_cache = False + + outputs = self.bert( + input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + head_mask=head_mask, + inputs_embeds=inputs_embeds, + encoder_hidden_states=encoder_hidden_states, + encoder_attention_mask=encoder_attention_mask, + past_key_values=past_key_values, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + is_decoder=is_decoder, + mode=mode, + ) + + sequence_output = outputs[0] + prediction_scores = self.cls(sequence_output) + + if return_logits: + return prediction_scores[:, :-1, :].contiguous() + + lm_loss = None + if labels is not None: + # we are doing next-token prediction; shift prediction scores and input ids by one + shifted_prediction_scores = prediction_scores[:, :-1, :].contiguous() + labels = labels[:, 1:].contiguous() + loss_fct = CrossEntropyLoss(reduction=reduction, label_smoothing=0.1) + lm_loss = loss_fct(shifted_prediction_scores.view(-1, self.config.vocab_size), labels.view(-1)) + if reduction=='none': + lm_loss = lm_loss.view(prediction_scores.size(0),-1).sum(1) + + if not return_dict: + output = (prediction_scores,) + outputs[2:] + return ((lm_loss,) + output) if lm_loss is not None else output + + return CausalLMOutputWithCrossAttentions( + loss=lm_loss, + logits=prediction_scores, + past_key_values=outputs.past_key_values, + hidden_states=outputs.hidden_states, + attentions=outputs.attentions, + cross_attentions=outputs.cross_attentions, + ) + + def prepare_inputs_for_generation(self, input_ids, past=None, attention_mask=None, **model_kwargs): + input_shape = input_ids.shape + # if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly + if attention_mask is None: + attention_mask = input_ids.new_ones(input_shape) + + # cut decoder_input_ids if past is used + if past is not None: + input_ids = input_ids[:, -1:] + + return { + "input_ids": input_ids, + "attention_mask": attention_mask, + "past_key_values": past, + "encoder_hidden_states": model_kwargs.get("encoder_hidden_states", None), + "encoder_attention_mask": model_kwargs.get("encoder_attention_mask", None), + "is_decoder": True, + } + + def _reorder_cache(self, past, beam_idx): + reordered_past = () + for layer_past in past: + reordered_past += (tuple(past_state.index_select(0, beam_idx) for past_state in layer_past),) + return reordered_past diff --git a/extras/BLIP/models/nlvr_encoder.py b/extras/BLIP/models/nlvr_encoder.py new file mode 100644 index 0000000000000000000000000000000000000000..1946bb4a300f75afa4848f6622839445903c34a9 --- /dev/null +++ b/extras/BLIP/models/nlvr_encoder.py @@ -0,0 +1,843 @@ +import math +import os +import warnings +from dataclasses import dataclass +from typing import Optional, Tuple + +import torch +from torch import Tensor, device, dtype, nn +import torch.utils.checkpoint +from torch import nn +from torch.nn import 
CrossEntropyLoss +import torch.nn.functional as F + +from transformers.activations import ACT2FN +from transformers.file_utils import ( + ModelOutput, +) +from transformers.modeling_outputs import ( + BaseModelOutputWithPastAndCrossAttentions, + BaseModelOutputWithPoolingAndCrossAttentions, + CausalLMOutputWithCrossAttentions, + MaskedLMOutput, + MultipleChoiceModelOutput, + NextSentencePredictorOutput, + QuestionAnsweringModelOutput, + SequenceClassifierOutput, + TokenClassifierOutput, +) +from transformers.modeling_utils import ( + PreTrainedModel, + apply_chunking_to_forward, + find_pruneable_heads_and_indices, + prune_linear_layer, +) +from transformers.utils import logging +from transformers.models.bert.configuration_bert import BertConfig + + +logger = logging.get_logger(__name__) + + +class BertEmbeddings(nn.Module): + """Construct the embeddings from word and position embeddings.""" + + def __init__(self, config): + super().__init__() + self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id) + self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size) + + # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load + # any TensorFlow checkpoint file + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + # position_ids (1, len position emb) is contiguous in memory and exported when serialized + self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1))) + self.position_embedding_type = getattr(config, "position_embedding_type", "absolute") + + self.config = config + + def forward( + self, input_ids=None, position_ids=None, inputs_embeds=None, past_key_values_length=0 + ): + if input_ids is not None: + input_shape = input_ids.size() + else: + input_shape = inputs_embeds.size()[:-1] + + seq_length = input_shape[1] + + if position_ids is None: + position_ids = self.position_ids[:, past_key_values_length : seq_length + past_key_values_length] + + if inputs_embeds is None: + inputs_embeds = self.word_embeddings(input_ids) + + embeddings = inputs_embeds + + if self.position_embedding_type == "absolute": + position_embeddings = self.position_embeddings(position_ids) + embeddings += position_embeddings + embeddings = self.LayerNorm(embeddings) + embeddings = self.dropout(embeddings) + return embeddings + + +class BertSelfAttention(nn.Module): + def __init__(self, config, is_cross_attention): + super().__init__() + self.config = config + if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"): + raise ValueError( + "The hidden size (%d) is not a multiple of the number of attention " + "heads (%d)" % (config.hidden_size, config.num_attention_heads) + ) + + self.num_attention_heads = config.num_attention_heads + self.attention_head_size = int(config.hidden_size / config.num_attention_heads) + self.all_head_size = self.num_attention_heads * self.attention_head_size + + self.query = nn.Linear(config.hidden_size, self.all_head_size) + if is_cross_attention: + self.key = nn.Linear(config.encoder_width, self.all_head_size) + self.value = nn.Linear(config.encoder_width, self.all_head_size) + else: + self.key = nn.Linear(config.hidden_size, self.all_head_size) + self.value = nn.Linear(config.hidden_size, self.all_head_size) + + self.dropout = nn.Dropout(config.attention_probs_dropout_prob) + 
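+        # "absolute" position information is added once in BertEmbeddings; the
+        # "relative_key"/"relative_key_query" variants below instead learn an
+        # embedding over pairwise token distances, applied inside forward().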
self.position_embedding_type = getattr(config, "position_embedding_type", "absolute") + if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query": + self.max_position_embeddings = config.max_position_embeddings + self.distance_embedding = nn.Embedding(2 * config.max_position_embeddings - 1, self.attention_head_size) + self.save_attention = False + + def save_attn_gradients(self, attn_gradients): + self.attn_gradients = attn_gradients + + def get_attn_gradients(self): + return self.attn_gradients + + def save_attention_map(self, attention_map): + self.attention_map = attention_map + + def get_attention_map(self): + return self.attention_map + + def transpose_for_scores(self, x): + new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size) + x = x.view(*new_x_shape) + return x.permute(0, 2, 1, 3) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + ): + mixed_query_layer = self.query(hidden_states) + + # If this is instantiated as a cross-attention module, the keys + # and values come from an encoder; the attention mask needs to be + # such that the encoder's padding tokens are not attended to. + is_cross_attention = encoder_hidden_states is not None + + if is_cross_attention: + key_layer = self.transpose_for_scores(self.key(encoder_hidden_states)) + value_layer = self.transpose_for_scores(self.value(encoder_hidden_states)) + attention_mask = encoder_attention_mask + elif past_key_value is not None: + key_layer = self.transpose_for_scores(self.key(hidden_states)) + value_layer = self.transpose_for_scores(self.value(hidden_states)) + key_layer = torch.cat([past_key_value[0], key_layer], dim=2) + value_layer = torch.cat([past_key_value[1], value_layer], dim=2) + else: + key_layer = self.transpose_for_scores(self.key(hidden_states)) + value_layer = self.transpose_for_scores(self.value(hidden_states)) + + query_layer = self.transpose_for_scores(mixed_query_layer) + + past_key_value = (key_layer, value_layer) + + # Take the dot product between "query" and "key" to get the raw attention scores. 
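+        # query/key/value are (batch, heads, seq_len, head_size) after
+        # transpose_for_scores, so the matmul below produces raw scores of shape
+        # (batch, heads, query_len, key_len).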
+ attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2)) + + if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query": + seq_length = hidden_states.size()[1] + position_ids_l = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(-1, 1) + position_ids_r = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(1, -1) + distance = position_ids_l - position_ids_r + positional_embedding = self.distance_embedding(distance + self.max_position_embeddings - 1) + positional_embedding = positional_embedding.to(dtype=query_layer.dtype) # fp16 compatibility + + if self.position_embedding_type == "relative_key": + relative_position_scores = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding) + attention_scores = attention_scores + relative_position_scores + elif self.position_embedding_type == "relative_key_query": + relative_position_scores_query = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding) + relative_position_scores_key = torch.einsum("bhrd,lrd->bhlr", key_layer, positional_embedding) + attention_scores = attention_scores + relative_position_scores_query + relative_position_scores_key + + attention_scores = attention_scores / math.sqrt(self.attention_head_size) + if attention_mask is not None: + # Apply the attention mask is (precomputed for all layers in BertModel forward() function) + attention_scores = attention_scores + attention_mask + + # Normalize the attention scores to probabilities. + attention_probs = nn.Softmax(dim=-1)(attention_scores) + + if is_cross_attention and self.save_attention: + self.save_attention_map(attention_probs) + attention_probs.register_hook(self.save_attn_gradients) + + # This is actually dropping out entire tokens to attend to, which might + # seem a bit unusual, but is taken from the original Transformer paper. 
+ attention_probs_dropped = self.dropout(attention_probs) + + # Mask heads if we want to + if head_mask is not None: + attention_probs_dropped = attention_probs_dropped * head_mask + + context_layer = torch.matmul(attention_probs_dropped, value_layer) + + context_layer = context_layer.permute(0, 2, 1, 3).contiguous() + new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,) + context_layer = context_layer.view(*new_context_layer_shape) + + outputs = (context_layer, attention_probs) if output_attentions else (context_layer,) + + outputs = outputs + (past_key_value,) + return outputs + + +class BertSelfOutput(nn.Module): + def __init__(self, config, twin=False, merge=False): + super().__init__() + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + if twin: + self.dense0 = nn.Linear(config.hidden_size, config.hidden_size) + self.dense1 = nn.Linear(config.hidden_size, config.hidden_size) + else: + self.dense = nn.Linear(config.hidden_size, config.hidden_size) + if merge: + self.act = ACT2FN[config.hidden_act] + self.merge_layer = nn.Linear(config.hidden_size * 2, config.hidden_size) + self.merge = True + else: + self.merge = False + + def forward(self, hidden_states, input_tensor): + if type(hidden_states) == list: + hidden_states0 = self.dense0(hidden_states[0]) + hidden_states1 = self.dense1(hidden_states[1]) + if self.merge: + #hidden_states = self.merge_layer(self.act(torch.cat([hidden_states0,hidden_states1],dim=-1))) + hidden_states = self.merge_layer(torch.cat([hidden_states0,hidden_states1],dim=-1)) + else: + hidden_states = (hidden_states0+hidden_states1)/2 + else: + hidden_states = self.dense(hidden_states) + hidden_states = self.dropout(hidden_states) + hidden_states = self.LayerNorm(hidden_states + input_tensor) + return hidden_states + + +class BertAttention(nn.Module): + def __init__(self, config, is_cross_attention=False, layer_num=-1): + super().__init__() + if is_cross_attention: + self.self0 = BertSelfAttention(config, is_cross_attention) + self.self1 = BertSelfAttention(config, is_cross_attention) + else: + self.self = BertSelfAttention(config, is_cross_attention) + self.output = BertSelfOutput(config, twin=is_cross_attention, merge=(is_cross_attention and layer_num>=6)) + self.pruned_heads = set() + + def prune_heads(self, heads): + if len(heads) == 0: + return + heads, index = find_pruneable_heads_and_indices( + heads, self.self.num_attention_heads, self.self.attention_head_size, self.pruned_heads + ) + + # Prune linear layers + self.self.query = prune_linear_layer(self.self.query, index) + self.self.key = prune_linear_layer(self.self.key, index) + self.self.value = prune_linear_layer(self.self.value, index) + self.output.dense = prune_linear_layer(self.output.dense, index, dim=1) + + # Update hyper params and store pruned heads + self.self.num_attention_heads = self.self.num_attention_heads - len(heads) + self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads + self.pruned_heads = self.pruned_heads.union(heads) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + ): + if type(encoder_hidden_states)==list: + self_outputs0 = self.self0( + hidden_states, + attention_mask, + head_mask, + encoder_hidden_states[0], + encoder_attention_mask[0], + past_key_value, + output_attentions, + ) + self_outputs1 = 
self.self1( + hidden_states, + attention_mask, + head_mask, + encoder_hidden_states[1], + encoder_attention_mask[1], + past_key_value, + output_attentions, + ) + attention_output = self.output([self_outputs0[0],self_outputs1[0]], hidden_states) + + outputs = (attention_output,) + self_outputs0[1:] # add attentions if we output them + else: + self_outputs = self.self( + hidden_states, + attention_mask, + head_mask, + encoder_hidden_states, + encoder_attention_mask, + past_key_value, + output_attentions, + ) + attention_output = self.output(self_outputs[0], hidden_states) + outputs = (attention_output,) + self_outputs[1:] # add attentions if we output them + return outputs + + +class BertIntermediate(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.intermediate_size) + if isinstance(config.hidden_act, str): + self.intermediate_act_fn = ACT2FN[config.hidden_act] + else: + self.intermediate_act_fn = config.hidden_act + + def forward(self, hidden_states): + hidden_states = self.dense(hidden_states) + hidden_states = self.intermediate_act_fn(hidden_states) + return hidden_states + + +class BertOutput(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.intermediate_size, config.hidden_size) + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + def forward(self, hidden_states, input_tensor): + hidden_states = self.dense(hidden_states) + hidden_states = self.dropout(hidden_states) + hidden_states = self.LayerNorm(hidden_states + input_tensor) + return hidden_states + + +class BertLayer(nn.Module): + def __init__(self, config, layer_num): + super().__init__() + self.config = config + self.chunk_size_feed_forward = config.chunk_size_feed_forward + self.seq_len_dim = 1 + self.attention = BertAttention(config) + self.layer_num = layer_num + if self.config.add_cross_attention: + self.crossattention = BertAttention(config, is_cross_attention=self.config.add_cross_attention, layer_num=layer_num) + self.intermediate = BertIntermediate(config) + self.output = BertOutput(config) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + mode=None, + ): + # decoder uni-directional self-attention cached key/values tuple is at positions 1,2 + self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None + self_attention_outputs = self.attention( + hidden_states, + attention_mask, + head_mask, + output_attentions=output_attentions, + past_key_value=self_attn_past_key_value, + ) + attention_output = self_attention_outputs[0] + + outputs = self_attention_outputs[1:-1] + present_key_value = self_attention_outputs[-1] + + if mode=='multimodal': + assert encoder_hidden_states is not None, "encoder_hidden_states must be given for cross-attention layers" + cross_attention_outputs = self.crossattention( + attention_output, + attention_mask, + head_mask, + encoder_hidden_states, + encoder_attention_mask, + output_attentions=output_attentions, + ) + attention_output = cross_attention_outputs[0] + outputs = outputs + cross_attention_outputs[1:-1] # add cross attentions if we output attention weights + layer_output = apply_chunking_to_forward( + self.feed_forward_chunk, self.chunk_size_feed_forward, self.seq_len_dim, attention_output + ) + outputs = (layer_output,) + outputs 
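+        # The (key, value) cache tuple is appended last so that BertEncoder can
+        # collect it into next_decoder_cache when use_cache is enabled.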
+ + outputs = outputs + (present_key_value,) + + return outputs + + def feed_forward_chunk(self, attention_output): + intermediate_output = self.intermediate(attention_output) + layer_output = self.output(intermediate_output, attention_output) + return layer_output + + +class BertEncoder(nn.Module): + def __init__(self, config): + super().__init__() + self.config = config + self.layer = nn.ModuleList([BertLayer(config,i) for i in range(config.num_hidden_layers)]) + self.gradient_checkpointing = False + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_values=None, + use_cache=None, + output_attentions=False, + output_hidden_states=False, + return_dict=True, + mode='multimodal', + ): + all_hidden_states = () if output_hidden_states else None + all_self_attentions = () if output_attentions else None + all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None + + next_decoder_cache = () if use_cache else None + + for i in range(self.config.num_hidden_layers): + layer_module = self.layer[i] + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + layer_head_mask = head_mask[i] if head_mask is not None else None + past_key_value = past_key_values[i] if past_key_values is not None else None + + if self.gradient_checkpointing and self.training: + + if use_cache: + logger.warn( + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." + ) + use_cache = False + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs, past_key_value, output_attentions) + + return custom_forward + + layer_outputs = torch.utils.checkpoint.checkpoint( + create_custom_forward(layer_module), + hidden_states, + attention_mask, + layer_head_mask, + encoder_hidden_states, + encoder_attention_mask, + mode=mode, + ) + else: + layer_outputs = layer_module( + hidden_states, + attention_mask, + layer_head_mask, + encoder_hidden_states, + encoder_attention_mask, + past_key_value, + output_attentions, + mode=mode, + ) + + hidden_states = layer_outputs[0] + if use_cache: + next_decoder_cache += (layer_outputs[-1],) + if output_attentions: + all_self_attentions = all_self_attentions + (layer_outputs[1],) + + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + if not return_dict: + return tuple( + v + for v in [ + hidden_states, + next_decoder_cache, + all_hidden_states, + all_self_attentions, + all_cross_attentions, + ] + if v is not None + ) + return BaseModelOutputWithPastAndCrossAttentions( + last_hidden_state=hidden_states, + past_key_values=next_decoder_cache, + hidden_states=all_hidden_states, + attentions=all_self_attentions, + cross_attentions=all_cross_attentions, + ) + + +class BertPooler(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.hidden_size) + self.activation = nn.Tanh() + + def forward(self, hidden_states): + # We "pool" the model by simply taking the hidden state corresponding + # to the first token. 
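+        # hidden_states[:, 0] is the embedding of the first ([CLS]) token.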
+ first_token_tensor = hidden_states[:, 0] + pooled_output = self.dense(first_token_tensor) + pooled_output = self.activation(pooled_output) + return pooled_output + + +class BertPredictionHeadTransform(nn.Module): + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.hidden_size) + if isinstance(config.hidden_act, str): + self.transform_act_fn = ACT2FN[config.hidden_act] + else: + self.transform_act_fn = config.hidden_act + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + + def forward(self, hidden_states): + hidden_states = self.dense(hidden_states) + hidden_states = self.transform_act_fn(hidden_states) + hidden_states = self.LayerNorm(hidden_states) + return hidden_states + + +class BertLMPredictionHead(nn.Module): + def __init__(self, config): + super().__init__() + self.transform = BertPredictionHeadTransform(config) + + # The output weights are the same as the input embeddings, but there is + # an output-only bias for each token. + self.decoder = nn.Linear(config.hidden_size, config.vocab_size, bias=False) + + self.bias = nn.Parameter(torch.zeros(config.vocab_size)) + + # Need a link between the two variables so that the bias is correctly resized with `resize_token_embeddings` + self.decoder.bias = self.bias + + def forward(self, hidden_states): + hidden_states = self.transform(hidden_states) + hidden_states = self.decoder(hidden_states) + return hidden_states + + +class BertOnlyMLMHead(nn.Module): + def __init__(self, config): + super().__init__() + self.predictions = BertLMPredictionHead(config) + + def forward(self, sequence_output): + prediction_scores = self.predictions(sequence_output) + return prediction_scores + + +class BertPreTrainedModel(PreTrainedModel): + """ + An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained + models. + """ + + config_class = BertConfig + base_model_prefix = "bert" + _keys_to_ignore_on_load_missing = [r"position_ids"] + + def _init_weights(self, module): + """ Initialize the weights """ + if isinstance(module, (nn.Linear, nn.Embedding)): + # Slightly different from the TF version which uses truncated_normal for initialization + # cf https://github.com/pytorch/pytorch/pull/5617 + module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) + elif isinstance(module, nn.LayerNorm): + module.bias.data.zero_() + module.weight.data.fill_(1.0) + if isinstance(module, nn.Linear) and module.bias is not None: + module.bias.data.zero_() + + +class BertModel(BertPreTrainedModel): + """ + The model can behave as an encoder (with only self-attention) as well as a decoder, in which case a layer of + cross-attention is added between the self-attention layers, following the architecture described in `Attention is + all you need <https://arxiv.org/abs/1706.03762>`__ by Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit, + Llion Jones, Aidan N. Gomez, Lukasz Kaiser and Illia Polosukhin. + To behave as a decoder the model needs to be initialized with the :obj:`is_decoder` argument of the configuration + set to :obj:`True`. To be used in a Seq2Seq model, the model needs to be initialized with both the :obj:`is_decoder` + argument and :obj:`add_cross_attention` set to :obj:`True`; an :obj:`encoder_hidden_states` is then expected as an + input to the forward pass. 
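+
+    Example (an illustrative sketch added in this port, not from the upstream source; it
+    assumes NLVR-style inputs with two image-feature streams whose feature size matches
+    the ``encoder_width`` config attribute)::
+
+        >>> import torch
+        >>> config = BertConfig(add_cross_attention=True, encoder_width=768)
+        >>> model = BertModel(config, add_pooling_layer=False)
+        >>> text_ids = torch.randint(0, config.vocab_size, (1, 8))
+        >>> img0, img1 = torch.randn(1, 16, 768), torch.randn(1, 16, 768)
+        >>> masks = [torch.ones(1, 16, dtype=torch.long), torch.ones(1, 16, dtype=torch.long)]
+        >>> out = model(text_ids, encoder_hidden_states=[img0, img1],
+        ...             encoder_attention_mask=masks, mode='multimodal')
+        >>> out.last_hidden_state.shape
+        torch.Size([1, 8, 768])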
+ """ + + def __init__(self, config, add_pooling_layer=True): + super().__init__(config) + self.config = config + + self.embeddings = BertEmbeddings(config) + + self.encoder = BertEncoder(config) + + self.pooler = BertPooler(config) if add_pooling_layer else None + + self.init_weights() + + + def get_input_embeddings(self): + return self.embeddings.word_embeddings + + def set_input_embeddings(self, value): + self.embeddings.word_embeddings = value + + def _prune_heads(self, heads_to_prune): + """ + Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base + class PreTrainedModel + """ + for layer, heads in heads_to_prune.items(): + self.encoder.layer[layer].attention.prune_heads(heads) + + + def get_extended_attention_mask(self, attention_mask: Tensor, input_shape: Tuple[int], device: device, is_decoder: bool) -> Tensor: + """ + Makes broadcastable attention and causal masks so that future and masked tokens are ignored. + + Arguments: + attention_mask (:obj:`torch.Tensor`): + Mask with ones indicating tokens to attend to, zeros for tokens to ignore. + input_shape (:obj:`Tuple[int]`): + The shape of the input to the model. + device: (:obj:`torch.device`): + The device of the input to the model. + + Returns: + :obj:`torch.Tensor` The extended attention mask, with a the same dtype as :obj:`attention_mask.dtype`. + """ + # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length] + # ourselves in which case we just need to make it broadcastable to all heads. + if attention_mask.dim() == 3: + extended_attention_mask = attention_mask[:, None, :, :] + elif attention_mask.dim() == 2: + # Provided a padding mask of dimensions [batch_size, seq_length] + # - if the model is a decoder, apply a causal mask in addition to the padding mask + # - if the model is an encoder, make the mask broadcastable to [batch_size, num_heads, seq_length, seq_length] + if is_decoder: + batch_size, seq_length = input_shape + + seq_ids = torch.arange(seq_length, device=device) + causal_mask = seq_ids[None, None, :].repeat(batch_size, seq_length, 1) <= seq_ids[None, :, None] + # in case past_key_values are used we need to add a prefix ones mask to the causal mask + # causal and attention masks must have same type with pytorch version < 1.3 + causal_mask = causal_mask.to(attention_mask.dtype) + + if causal_mask.shape[1] < attention_mask.shape[1]: + prefix_seq_len = attention_mask.shape[1] - causal_mask.shape[1] + causal_mask = torch.cat( + [ + torch.ones((batch_size, seq_length, prefix_seq_len), device=device, dtype=causal_mask.dtype), + causal_mask, + ], + axis=-1, + ) + + extended_attention_mask = causal_mask[:, None, :, :] * attention_mask[:, None, None, :] + else: + extended_attention_mask = attention_mask[:, None, None, :] + else: + raise ValueError( + "Wrong shape for input_ids (shape {}) or attention_mask (shape {})".format( + input_shape, attention_mask.shape + ) + ) + + # Since attention_mask is 1.0 for positions we want to attend and 0.0 for + # masked positions, this operation will create a tensor which is 0.0 for + # positions we want to attend and -10000.0 for masked positions. + # Since we are adding it to the raw scores before the softmax, this is + # effectively the same as removing these entirely. 
+ extended_attention_mask = extended_attention_mask.to(dtype=self.dtype) # fp16 compatibility + extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0 + return extended_attention_mask + + def forward( + self, + input_ids=None, + attention_mask=None, + position_ids=None, + head_mask=None, + inputs_embeds=None, + encoder_embeds=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_values=None, + use_cache=None, + output_attentions=None, + output_hidden_states=None, + return_dict=None, + is_decoder=False, + mode='multimodal', + ): + r""" + encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): + Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if + the model is configured as a decoder. + encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): + Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in + the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``: + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): + Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding. + If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids` + (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)` + instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`. + use_cache (:obj:`bool`, `optional`): + If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up + decoding (see :obj:`past_key_values`). 
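+        mode (:obj:`str`, `optional`, defaults to :obj:`'multimodal'`):
+            (Documented here for this port; behaviour inferred from :class:`BertLayer`.) When set to
+            :obj:`'multimodal'`, every layer also runs the twin cross-attention over
+            :obj:`encoder_hidden_states`, which may be a list of two tensors (one per image stream)
+            with :obj:`encoder_attention_mask` given as a matching list of masks.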
+ """ + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + if is_decoder: + use_cache = use_cache if use_cache is not None else self.config.use_cache + else: + use_cache = False + + if input_ids is not None and inputs_embeds is not None: + raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") + elif input_ids is not None: + input_shape = input_ids.size() + batch_size, seq_length = input_shape + device = input_ids.device + elif inputs_embeds is not None: + input_shape = inputs_embeds.size()[:-1] + batch_size, seq_length = input_shape + device = inputs_embeds.device + elif encoder_embeds is not None: + input_shape = encoder_embeds.size()[:-1] + batch_size, seq_length = input_shape + device = encoder_embeds.device + else: + raise ValueError("You have to specify either input_ids or inputs_embeds or encoder_embeds") + + # past_key_values_length + past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0 + + if attention_mask is None: + attention_mask = torch.ones(((batch_size, seq_length + past_key_values_length)), device=device) + + # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length] + # ourselves in which case we just need to make it broadcastable to all heads. + extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(attention_mask, input_shape, + device, is_decoder) + + # If a 2D or 3D attention mask is provided for the cross-attention + # we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length] + if encoder_hidden_states is not None: + if type(encoder_hidden_states) == list: + encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states[0].size() + else: + encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size() + encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length) + + if type(encoder_attention_mask) == list: + encoder_extended_attention_mask = [self.invert_attention_mask(mask) for mask in encoder_attention_mask] + elif encoder_attention_mask is None: + encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device) + encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) + else: + encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) + else: + encoder_extended_attention_mask = None + + # Prepare head mask if needed + # 1.0 in head_mask indicate we keep the head + # attention_probs has shape bsz x n_heads x N x N + # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads] + # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length] + head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) + + if encoder_embeds is None: + embedding_output = self.embeddings( + input_ids=input_ids, + position_ids=position_ids, + inputs_embeds=inputs_embeds, + past_key_values_length=past_key_values_length, + ) + else: + embedding_output = encoder_embeds + + encoder_outputs = self.encoder( + embedding_output, + attention_mask=extended_attention_mask, + head_mask=head_mask, + encoder_hidden_states=encoder_hidden_states, + encoder_attention_mask=encoder_extended_attention_mask, + 
past_key_values=past_key_values, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + mode=mode, + ) + sequence_output = encoder_outputs[0] + pooled_output = self.pooler(sequence_output) if self.pooler is not None else None + + if not return_dict: + return (sequence_output, pooled_output) + encoder_outputs[1:] + + return BaseModelOutputWithPoolingAndCrossAttentions( + last_hidden_state=sequence_output, + pooler_output=pooled_output, + past_key_values=encoder_outputs.past_key_values, + hidden_states=encoder_outputs.hidden_states, + attentions=encoder_outputs.attentions, + cross_attentions=encoder_outputs.cross_attentions, + ) + diff --git a/extras/BLIP/models/vit.py b/extras/BLIP/models/vit.py new file mode 100644 index 0000000000000000000000000000000000000000..91c0adad7c8728ae01666bfbdd83d5fd040f5be9 --- /dev/null +++ b/extras/BLIP/models/vit.py @@ -0,0 +1,308 @@ +''' + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. + * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + * By Junnan Li + * Based on timm code base + * https://github.com/rwightman/pytorch-image-models/tree/master/timm +''' + +import torch +import torch.nn as nn +import torch.nn.functional as F +from functools import partial + +from timm.models.vision_transformer import _cfg, PatchEmbed, resize_pos_embed # resize_pos_embed is used by _load_weights below +from timm.models.registry import register_model +from timm.models.layers import trunc_normal_, DropPath +from timm.models.helpers import named_apply, adapt_input_conv + + +def checkpoint_wrapper(x): + return x + + +class Mlp(nn.Module): + """ MLP as used in Vision Transformer, MLP-Mixer and related networks + """ + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class Attention(nn.Module): + def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.): + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + # NOTE scale factor was wrong in my original version, can set manually to be compat with prev weights + self.scale = qk_scale or head_dim ** -0.5 + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + self.attn_gradients = None + self.attention_map = None + + def save_attn_gradients(self, attn_gradients): + self.attn_gradients = attn_gradients + + def get_attn_gradients(self): + return self.attn_gradients + + def save_attention_map(self, attention_map): + self.attention_map = attention_map + + def get_attention_map(self): + return self.attention_map + + def forward(self, x, register_hook=False): + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[2] # make torchscript happy (cannot use tensor as tuple) + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + 
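+        # When register_hook is set, the attention map and its gradient are cached
+        # on the module (save_attention_map / save_attn_gradients above), e.g. for
+        # Grad-CAM-style attention visualisation.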
+ if register_hook: + self.save_attention_map(attn) + attn.register_hook(self.save_attn_gradients) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class Block(nn.Module): + + def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop=0., attn_drop=0., + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm, use_grad_checkpointing=False): + super().__init__() + self.norm1 = norm_layer(dim) + self.attn = Attention( + dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop) + # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + + if use_grad_checkpointing: + self.attn = checkpoint_wrapper(self.attn) + self.mlp = checkpoint_wrapper(self.mlp) + + def forward(self, x, register_hook=False): + x = x + self.drop_path(self.attn(self.norm1(x), register_hook=register_hook)) + x = x + self.drop_path(self.mlp(self.norm2(x))) + return x + + +class VisionTransformer(nn.Module): + """ Vision Transformer + A PyTorch impl of : `An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale` - + https://arxiv.org/abs/2010.11929 + """ + def __init__(self, img_size=224, patch_size=16, in_chans=3, num_classes=1000, embed_dim=768, depth=12, + num_heads=12, mlp_ratio=4., qkv_bias=True, qk_scale=None, representation_size=None, + drop_rate=0., attn_drop_rate=0., drop_path_rate=0., norm_layer=None, + use_grad_checkpointing=False, ckpt_layer=0): + """ + Args: + img_size (int, tuple): input image size + patch_size (int, tuple): patch size + in_chans (int): number of input channels + num_classes (int): number of classes for classification head + embed_dim (int): embedding dimension + depth (int): depth of transformer + num_heads (int): number of attention heads + mlp_ratio (int): ratio of mlp hidden dim to embedding dim + qkv_bias (bool): enable bias for qkv if True + qk_scale (float): override default qk scale of head_dim ** -0.5 if set + representation_size (Optional[int]): enable and set representation layer (pre-logits) to this value if set + drop_rate (float): dropout rate + attn_drop_rate (float): attention dropout rate + drop_path_rate (float): stochastic depth rate + norm_layer: (nn.Module): normalization layer + """ + super().__init__() + self.num_features = self.embed_dim = embed_dim # num_features for consistency with other models + norm_layer = norm_layer or partial(nn.LayerNorm, eps=1e-6) + + self.patch_embed = PatchEmbed( + img_size=img_size, patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim) + + num_patches = self.patch_embed.num_patches + + self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim)) + self.pos_embed = nn.Parameter(torch.zeros(1, num_patches + 1, embed_dim)) + self.pos_drop = nn.Dropout(p=drop_rate) + + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, depth)] # stochastic depth decay rule + self.blocks = nn.ModuleList([ + Block( + dim=embed_dim, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, qk_scale=qk_scale, + drop=drop_rate, attn_drop=attn_drop_rate, drop_path=dpr[i], norm_layer=norm_layer, + use_grad_checkpointing=(use_grad_checkpointing and i>=depth-ckpt_layer) + ) + for i in range(depth)]) + self.norm = norm_layer(embed_dim) 
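+        # Learned class token and position embeddings are initialised below with a
+        # truncated normal (std 0.02), following the timm reference implementation.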
+ + trunc_normal_(self.pos_embed, std=.02) + trunc_normal_(self.cls_token, std=.02) + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore + def no_weight_decay(self): + return {'pos_embed', 'cls_token'} + + def forward(self, x, register_blk=-1): + B = x.shape[0] + x = self.patch_embed(x) + + cls_tokens = self.cls_token.expand(B, -1, -1) # stole cls_tokens impl from Phil Wang, thanks + x = torch.cat((cls_tokens, x), dim=1) + + x = x + self.pos_embed[:,:x.size(1),:] + x = self.pos_drop(x) + + for i,blk in enumerate(self.blocks): + x = blk(x, register_blk==i) + x = self.norm(x) + + return x + + @torch.jit.ignore() + def load_pretrained(self, checkpoint_path, prefix=''): + _load_weights(self, checkpoint_path, prefix) + + +@torch.no_grad() +def _load_weights(model: VisionTransformer, checkpoint_path: str, prefix: str = ''): + """ Load weights from .npz checkpoints for official Google Brain Flax implementation + """ + import numpy as np + + def _n2p(w, t=True): + if w.ndim == 4 and w.shape[0] == w.shape[1] == w.shape[2] == 1: + w = w.flatten() + if t: + if w.ndim == 4: + w = w.transpose([3, 2, 0, 1]) + elif w.ndim == 3: + w = w.transpose([2, 0, 1]) + elif w.ndim == 2: + w = w.transpose([1, 0]) + return torch.from_numpy(w) + + w = np.load(checkpoint_path) + if not prefix and 'opt/target/embedding/kernel' in w: + prefix = 'opt/target/' + + if hasattr(model.patch_embed, 'backbone'): + # hybrid + backbone = model.patch_embed.backbone + stem_only = not hasattr(backbone, 'stem') + stem = backbone if stem_only else backbone.stem + stem.conv.weight.copy_(adapt_input_conv(stem.conv.weight.shape[1], _n2p(w[f'{prefix}conv_root/kernel']))) + stem.norm.weight.copy_(_n2p(w[f'{prefix}gn_root/scale'])) + stem.norm.bias.copy_(_n2p(w[f'{prefix}gn_root/bias'])) + if not stem_only: + for i, stage in enumerate(backbone.stages): + for j, block in enumerate(stage.blocks): + bp = f'{prefix}block{i + 1}/unit{j + 1}/' + for r in range(3): + getattr(block, f'conv{r + 1}').weight.copy_(_n2p(w[f'{bp}conv{r + 1}/kernel'])) + getattr(block, f'norm{r + 1}').weight.copy_(_n2p(w[f'{bp}gn{r + 1}/scale'])) + getattr(block, f'norm{r + 1}').bias.copy_(_n2p(w[f'{bp}gn{r + 1}/bias'])) + if block.downsample is not None: + block.downsample.conv.weight.copy_(_n2p(w[f'{bp}conv_proj/kernel'])) + block.downsample.norm.weight.copy_(_n2p(w[f'{bp}gn_proj/scale'])) + block.downsample.norm.bias.copy_(_n2p(w[f'{bp}gn_proj/bias'])) + embed_conv_w = _n2p(w[f'{prefix}embedding/kernel']) + else: + embed_conv_w = adapt_input_conv( + model.patch_embed.proj.weight.shape[1], _n2p(w[f'{prefix}embedding/kernel'])) + model.patch_embed.proj.weight.copy_(embed_conv_w) + model.patch_embed.proj.bias.copy_(_n2p(w[f'{prefix}embedding/bias'])) + model.cls_token.copy_(_n2p(w[f'{prefix}cls'], t=False)) + pos_embed_w = _n2p(w[f'{prefix}Transformer/posembed_input/pos_embedding'], t=False) + if pos_embed_w.shape != model.pos_embed.shape: + pos_embed_w = resize_pos_embed( # resize pos embedding when different size from pretrained weights + pos_embed_w, model.pos_embed, getattr(model, 'num_tokens', 1), model.patch_embed.grid_size) + model.pos_embed.copy_(pos_embed_w) + model.norm.weight.copy_(_n2p(w[f'{prefix}Transformer/encoder_norm/scale'])) + 
model.norm.bias.copy_(_n2p(w[f'{prefix}Transformer/encoder_norm/bias'])) +# if isinstance(model.head, nn.Linear) and model.head.bias.shape[0] == w[f'{prefix}head/bias'].shape[-1]: +# model.head.weight.copy_(_n2p(w[f'{prefix}head/kernel'])) +# model.head.bias.copy_(_n2p(w[f'{prefix}head/bias'])) +# if isinstance(getattr(model.pre_logits, 'fc', None), nn.Linear) and f'{prefix}pre_logits/bias' in w: +# model.pre_logits.fc.weight.copy_(_n2p(w[f'{prefix}pre_logits/kernel'])) +# model.pre_logits.fc.bias.copy_(_n2p(w[f'{prefix}pre_logits/bias'])) + for i, block in enumerate(model.blocks.children()): + block_prefix = f'{prefix}Transformer/encoderblock_{i}/' + mha_prefix = block_prefix + 'MultiHeadDotProductAttention_1/' + block.norm1.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/scale'])) + block.norm1.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/bias'])) + block.attn.qkv.weight.copy_(torch.cat([ + _n2p(w[f'{mha_prefix}{n}/kernel'], t=False).flatten(1).T for n in ('query', 'key', 'value')])) + block.attn.qkv.bias.copy_(torch.cat([ + _n2p(w[f'{mha_prefix}{n}/bias'], t=False).reshape(-1) for n in ('query', 'key', 'value')])) + block.attn.proj.weight.copy_(_n2p(w[f'{mha_prefix}out/kernel']).flatten(1)) + block.attn.proj.bias.copy_(_n2p(w[f'{mha_prefix}out/bias'])) + for r in range(2): + getattr(block.mlp, f'fc{r + 1}').weight.copy_(_n2p(w[f'{block_prefix}MlpBlock_3/Dense_{r}/kernel'])) + getattr(block.mlp, f'fc{r + 1}').bias.copy_(_n2p(w[f'{block_prefix}MlpBlock_3/Dense_{r}/bias'])) + block.norm2.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_2/scale'])) + block.norm2.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_2/bias'])) + + +def interpolate_pos_embed(pos_embed_checkpoint, visual_encoder): + # interpolate position embedding + embedding_size = pos_embed_checkpoint.shape[-1] + num_patches = visual_encoder.patch_embed.num_patches + num_extra_tokens = visual_encoder.pos_embed.shape[-2] - num_patches + # height (== width) for the checkpoint position embedding + orig_size = int((pos_embed_checkpoint.shape[-2] - num_extra_tokens) ** 0.5) + # height (== width) for the new position embedding + new_size = int(num_patches ** 0.5) + + if orig_size!=new_size: + # class_token and dist_token are kept unchanged + extra_tokens = pos_embed_checkpoint[:, :num_extra_tokens] + # only the position tokens are interpolated + pos_tokens = pos_embed_checkpoint[:, num_extra_tokens:] + pos_tokens = pos_tokens.reshape(-1, orig_size, orig_size, embedding_size).permute(0, 3, 1, 2) + pos_tokens = torch.nn.functional.interpolate( + pos_tokens, size=(new_size, new_size), mode='bicubic', align_corners=False) + pos_tokens = pos_tokens.permute(0, 2, 3, 1).flatten(1, 2) + new_pos_embed = torch.cat((extra_tokens, pos_tokens), dim=1) + print('reshape position embedding from %d to %d'%(orig_size ** 2,new_size ** 2)) + + return new_pos_embed + else: + return pos_embed_checkpoint \ No newline at end of file diff --git a/extras/__pycache__/expansion.cpython-310.pyc b/extras/__pycache__/expansion.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0fb39264efb47956d1a8c4f1c92ce82f50c1f2e7 Binary files /dev/null and b/extras/__pycache__/expansion.cpython-310.pyc differ diff --git a/extras/__pycache__/face_crop.cpython-310.pyc b/extras/__pycache__/face_crop.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7aaf89aa2690050150436749488e28dcdfefc209 Binary files /dev/null and b/extras/__pycache__/face_crop.cpython-310.pyc differ diff --git 
a/extras/__pycache__/interrogate.cpython-310.pyc b/extras/__pycache__/interrogate.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..22e2ff3a34649e8c80aea52b593153a0eb848c4c Binary files /dev/null and b/extras/__pycache__/interrogate.cpython-310.pyc differ diff --git a/extras/__pycache__/ip_adapter.cpython-310.pyc b/extras/__pycache__/ip_adapter.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c568bdeecc978831305aba76e255695cc3ab5844 Binary files /dev/null and b/extras/__pycache__/ip_adapter.cpython-310.pyc differ diff --git a/extras/__pycache__/preprocessors.cpython-310.pyc b/extras/__pycache__/preprocessors.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fddec87baf6e73a6e97c8b22e362c7c09b96e24b Binary files /dev/null and b/extras/__pycache__/preprocessors.cpython-310.pyc differ diff --git a/extras/__pycache__/resampler.cpython-310.pyc b/extras/__pycache__/resampler.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3c1cd117156d7605be8c98f40be237f66601d71b Binary files /dev/null and b/extras/__pycache__/resampler.cpython-310.pyc differ diff --git a/extras/__pycache__/vae_interpose.cpython-310.pyc b/extras/__pycache__/vae_interpose.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..27d24762ae8d9948e82b250329775921565a401a Binary files /dev/null and b/extras/__pycache__/vae_interpose.cpython-310.pyc differ diff --git a/extras/__pycache__/wd14tagger.cpython-310.pyc b/extras/__pycache__/wd14tagger.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cf434c52a2765a1fbc39299d32c2509ac891e762 Binary files /dev/null and b/extras/__pycache__/wd14tagger.cpython-310.pyc differ diff --git a/extras/expansion.py b/extras/expansion.py new file mode 100644 index 0000000000000000000000000000000000000000..34c1ee8d4e716cae5662055d703426e63c71b8e5 --- /dev/null +++ b/extras/expansion.py @@ -0,0 +1,129 @@ +# Fooocus GPT2 Expansion +# Algorithm created by Lvmin Zhang at 2023, Stanford +# If used inside Fooocus, any use is permitted. +# If used outside Fooocus, only non-commercial use is permitted (CC-By NC 4.0). +# This applies to the word list, vocab, model, and algorithm. + + +import os +import torch +import math +import ldm_patched.modules.model_management as model_management + +from transformers.generation.logits_process import LogitsProcessorList +from transformers import AutoTokenizer, AutoModelForCausalLM, set_seed +from modules.config import path_fooocus_expansion +from ldm_patched.modules.model_patcher import ModelPatcher + + +# limitation of np.random.seed(), called from transformers.set_seed() +SEED_LIMIT_NUMPY = 2**32 +neg_inf = - 8192.0 + + +def safe_str(x): + x = str(x) + for _ in range(16): + x = x.replace(' ', ' ') + return x.strip(",. 
\r\n") + + +def remove_pattern(x, pattern): + for p in pattern: + x = x.replace(p, '') + return x + + +class FooocusExpansion: + def __init__(self): + self.tokenizer = AutoTokenizer.from_pretrained(path_fooocus_expansion) + + positive_words = open(os.path.join(path_fooocus_expansion, 'positive.txt'), + encoding='utf-8').read().splitlines() + positive_words = ['Ġ' + x.lower() for x in positive_words if x != ''] + + self.logits_bias = torch.zeros((1, len(self.tokenizer.vocab)), dtype=torch.float32) + neg_inf + + debug_list = [] + for k, v in self.tokenizer.vocab.items(): + if k in positive_words: + self.logits_bias[0, v] = 0 + debug_list.append(k[1:]) + + print(f'Fooocus V2 Expansion: Vocab with {len(debug_list)} words.') + + # debug_list = '\n'.join(sorted(debug_list)) + # print(debug_list) + + # t11 = self.tokenizer(',', return_tensors="np") + # t198 = self.tokenizer('\n', return_tensors="np") + # eos = self.tokenizer.eos_token_id + + self.model = AutoModelForCausalLM.from_pretrained(path_fooocus_expansion) + self.model.eval() + + load_device = model_management.text_encoder_device() + offload_device = model_management.text_encoder_offload_device() + + # MPS hack + if model_management.is_device_mps(load_device): + load_device = torch.device('cpu') + offload_device = torch.device('cpu') + + use_fp16 = model_management.should_use_fp16(device=load_device) + + if use_fp16: + self.model.half() + + self.patcher = ModelPatcher(self.model, load_device=load_device, offload_device=offload_device) + print(f'Fooocus Expansion engine loaded for {load_device}, use_fp16 = {use_fp16}.') + + @torch.no_grad() + @torch.inference_mode() + def logits_processor(self, input_ids, scores): + assert scores.ndim == 2 and scores.shape[0] == 1 + self.logits_bias = self.logits_bias.to(scores) + + bias = self.logits_bias.clone() + bias[0, input_ids[0].to(bias.device).long()] = neg_inf + bias[0, 11] = 0 + + return scores + bias + + @torch.no_grad() + @torch.inference_mode() + def __call__(self, prompt, seed): + if prompt == '': + return '' + + if self.patcher.current_device != self.patcher.load_device: + print('Fooocus Expansion loaded by itself.') + model_management.load_model_gpu(self.patcher) + + seed = int(seed) % SEED_LIMIT_NUMPY + set_seed(seed) + prompt = safe_str(prompt) + ',' + + tokenized_kwargs = self.tokenizer(prompt, return_tensors="pt") + tokenized_kwargs.data['input_ids'] = tokenized_kwargs.data['input_ids'].to(self.patcher.load_device) + tokenized_kwargs.data['attention_mask'] = tokenized_kwargs.data['attention_mask'].to(self.patcher.load_device) + + current_token_length = int(tokenized_kwargs.data['input_ids'].shape[1]) + max_token_length = 75 * int(math.ceil(float(current_token_length) / 75.0)) + max_new_tokens = max_token_length - current_token_length + + if max_new_tokens == 0: + return prompt[:-1] + + # https://huggingface.co/blog/introducing-csearch + # https://huggingface.co/docs/transformers/generation_strategies + features = self.model.generate(**tokenized_kwargs, + top_k=100, + max_new_tokens=max_new_tokens, + do_sample=True, + logits_processor=LogitsProcessorList([self.logits_processor])) + + response = self.tokenizer.batch_decode(features, skip_special_tokens=True) + result = safe_str(response[0]) + + return result diff --git a/extras/face_crop.py b/extras/face_crop.py new file mode 100644 index 0000000000000000000000000000000000000000..d4da7e81d59334ecbd4d710d66a454cab796b349 --- /dev/null +++ b/extras/face_crop.py @@ -0,0 +1,50 @@ +import cv2 +import numpy as np +import modules.config + + 
+faceRestoreHelper = None + + +def align_warp_face(self, landmark, border_mode='constant'): + affine_matrix = cv2.estimateAffinePartial2D(landmark, self.face_template, method=cv2.LMEDS)[0] + self.affine_matrices.append(affine_matrix) + if border_mode == 'constant': + border_mode = cv2.BORDER_CONSTANT + elif border_mode == 'reflect101': + border_mode = cv2.BORDER_REFLECT101 + elif border_mode == 'reflect': + border_mode = cv2.BORDER_REFLECT + input_img = self.input_img + cropped_face = cv2.warpAffine(input_img, affine_matrix, self.face_size, + borderMode=border_mode, borderValue=(135, 133, 132)) + return cropped_face + + +def crop_image(img_rgb): + global faceRestoreHelper + + if faceRestoreHelper is None: + from extras.facexlib.utils.face_restoration_helper import FaceRestoreHelper + faceRestoreHelper = FaceRestoreHelper( + upscale_factor=1, + model_rootpath=modules.config.path_controlnet, + device='cpu' # using CPU is safer here since this helper runs outside of Fooocus memory management + ) + + faceRestoreHelper.clean_all() + faceRestoreHelper.read_image(np.ascontiguousarray(img_rgb[:, :, ::-1].copy())) + faceRestoreHelper.get_face_landmarks_5() + + landmarks = faceRestoreHelper.all_landmarks_5 + # landmarks are already sorted by detection confidence. + + if len(landmarks) == 0: + print('No face detected') + return img_rgb + else: + print(f'Detected {len(landmarks)} faces') + + result = align_warp_face(faceRestoreHelper, landmarks[0]) + + return np.ascontiguousarray(result[:, :, ::-1].copy()) diff --git a/extras/facexlib/detection/__init__.py b/extras/facexlib/detection/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4e52fd74e56c74d6c8766c8ca4170863bcd215e0 --- /dev/null +++ b/extras/facexlib/detection/__init__.py @@ -0,0 +1,31 @@ +import torch +from copy import deepcopy + +from extras.facexlib.utils import load_file_from_url +from .retinaface import RetinaFace + + +def init_detection_model(model_name, half=False, device='cuda', model_rootpath=None): + if model_name == 'retinaface_resnet50': + model = RetinaFace(network_name='resnet50', half=half, device=device) + model_url = 'https://github.com/xinntao/facexlib/releases/download/v0.1.0/detection_Resnet50_Final.pth' + elif model_name == 'retinaface_mobile0.25': + model = RetinaFace(network_name='mobile0.25', half=half, device=device) + model_url = 'https://github.com/xinntao/facexlib/releases/download/v0.1.0/detection_mobilenet0.25_Final.pth' + else: + raise NotImplementedError(f'{model_name} is not implemented.') + + model_path = load_file_from_url( + url=model_url, model_dir='facexlib/weights', progress=True, file_name=None, save_dir=model_rootpath) + + # TODO: clean pretrained model + load_net = torch.load(model_path, map_location=lambda storage, loc: storage) + # remove unnecessary 'module.' 
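+    # (checkpoints saved from nn.DataParallel/DistributedDataParallel prefix every
+    # state-dict key with 'module.'; strip it so the bare model can load them)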
+ for k, v in deepcopy(load_net).items(): + if k.startswith('module.'): + load_net[k[7:]] = v + load_net.pop(k) + model.load_state_dict(load_net, strict=True) + model.eval() + model = model.to(device) + return model diff --git a/extras/facexlib/detection/__pycache__/__init__.cpython-310.pyc b/extras/facexlib/detection/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..664744200c4f693a72e4dcda24e4b2d9959d6c91 Binary files /dev/null and b/extras/facexlib/detection/__pycache__/__init__.cpython-310.pyc differ diff --git a/extras/facexlib/detection/__pycache__/align_trans.cpython-310.pyc b/extras/facexlib/detection/__pycache__/align_trans.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d2a5604a2360e4bcc6409b1ed73d8e6aad9b7bc9 Binary files /dev/null and b/extras/facexlib/detection/__pycache__/align_trans.cpython-310.pyc differ diff --git a/extras/facexlib/detection/__pycache__/matlab_cp2tform.cpython-310.pyc b/extras/facexlib/detection/__pycache__/matlab_cp2tform.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..05d2c02dd4011dbc59bda7ccf739a5c55923fc8f Binary files /dev/null and b/extras/facexlib/detection/__pycache__/matlab_cp2tform.cpython-310.pyc differ diff --git a/extras/facexlib/detection/__pycache__/retinaface.cpython-310.pyc b/extras/facexlib/detection/__pycache__/retinaface.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..00495dd75f97db6e7588d9fb30fee0cc5d5d28ea Binary files /dev/null and b/extras/facexlib/detection/__pycache__/retinaface.cpython-310.pyc differ diff --git a/extras/facexlib/detection/__pycache__/retinaface_net.cpython-310.pyc b/extras/facexlib/detection/__pycache__/retinaface_net.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f7c10d9e807e7971f89643f14be46f5c3c5d2bbf Binary files /dev/null and b/extras/facexlib/detection/__pycache__/retinaface_net.cpython-310.pyc differ diff --git a/extras/facexlib/detection/__pycache__/retinaface_utils.cpython-310.pyc b/extras/facexlib/detection/__pycache__/retinaface_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..94823532b3c3832193497569402c0aa9ff8f44a7 Binary files /dev/null and b/extras/facexlib/detection/__pycache__/retinaface_utils.cpython-310.pyc differ diff --git a/extras/facexlib/detection/align_trans.py b/extras/facexlib/detection/align_trans.py new file mode 100644 index 0000000000000000000000000000000000000000..07f1eb365462c2ec5bbac6d1854c786b6fd6be90 --- /dev/null +++ b/extras/facexlib/detection/align_trans.py @@ -0,0 +1,219 @@ +import cv2 +import numpy as np + +from .matlab_cp2tform import get_similarity_transform_for_cv2 + +# reference facial points, a list of coordinates (x,y) +REFERENCE_FACIAL_POINTS = [[30.29459953, 51.69630051], [65.53179932, 51.50139999], [48.02519989, 71.73660278], + [33.54930115, 92.3655014], [62.72990036, 92.20410156]] + +DEFAULT_CROP_SIZE = (96, 112) + + +class FaceWarpException(Exception): + + def __str__(self): + return 'In File {}:{}'.format(__file__, super().__str__()) + + +def get_reference_facial_points(output_size=None, inner_padding_factor=0.0, outer_padding=(0, 0), default_square=False): + """ + Function: + ---------- + get reference 5 key points according to crop settings: + 0. Set default crop_size: + if default_square: + crop_size = (112, 112) + else: + crop_size = (96, 112) + 1. Pad the crop_size by inner_padding_factor in each side; + 2.
Resize crop_size into (output_size - outer_padding*2), + pad into output_size with outer_padding; + 3. Output reference_5point; + Parameters: + ---------- + @output_size: (w, h) or None + size of aligned face image + @inner_padding_factor: float in [0, 1] + padding factor applied to the inner (w, h) + @outer_padding: (w_pad, h_pad) + padding, in pixels, added around the resized inner region + @default_square: True or False + if True: + default crop_size = (112, 112) + else: + default crop_size = (96, 112); + !!! make sure, if output_size is not None: + (output_size - outer_padding) + = some_scale * (default crop_size * (1.0 + + inner_padding_factor)) + Returns: + ---------- + @reference_5point: 5x2 np.array + each row is a pair of transformed coordinates (x, y) + """ + + tmp_5pts = np.array(REFERENCE_FACIAL_POINTS) + tmp_crop_size = np.array(DEFAULT_CROP_SIZE) + + # 0) make the inner region a square + if default_square: + size_diff = max(tmp_crop_size) - tmp_crop_size + tmp_5pts += size_diff / 2 + tmp_crop_size += size_diff + + if (output_size and output_size[0] == tmp_crop_size[0] and output_size[1] == tmp_crop_size[1]): + + return tmp_5pts + + if (inner_padding_factor == 0 and outer_padding == (0, 0)): + if output_size is None: + return tmp_5pts + else: + raise FaceWarpException('No paddings to do, output_size must be None or {}'.format(tmp_crop_size)) + + # check output size + if not (0 <= inner_padding_factor <= 1.0): + raise FaceWarpException('Not (0 <= inner_padding_factor <= 1.0)') + + if ((inner_padding_factor > 0 or outer_padding[0] > 0 or outer_padding[1] > 0) and output_size is None): + output_size = (tmp_crop_size * (1 + inner_padding_factor * 2)).astype(np.int32) + output_size += np.array(outer_padding) + if not (outer_padding[0] < output_size[0] and outer_padding[1] < output_size[1]): + raise FaceWarpException('Not (outer_padding[0] < output_size[0] and outer_padding[1] < output_size[1])') + + # 1) pad the inner region according to inner_padding_factor + if inner_padding_factor > 0: + size_diff = tmp_crop_size * inner_padding_factor * 2 + tmp_5pts += size_diff / 2 + tmp_crop_size += np.round(size_diff).astype(np.int32) + + # 2) resize the padded inner region + size_bf_outer_pad = np.array(output_size) - np.array(outer_padding) * 2 + + if size_bf_outer_pad[0] * tmp_crop_size[1] != size_bf_outer_pad[1] * tmp_crop_size[0]: + raise FaceWarpException('Must have (output_size - outer_padding)' + ' = some_scale * (crop_size * (1.0 + inner_padding_factor))') + + scale_factor = size_bf_outer_pad[0].astype(np.float32) / tmp_crop_size[0] + tmp_5pts = tmp_5pts * scale_factor + # size_diff = tmp_crop_size * (scale_factor - min(scale_factor)) + # tmp_5pts = tmp_5pts + size_diff / 2 + tmp_crop_size = size_bf_outer_pad + + # 3) add outer_padding to make output_size + reference_5point = tmp_5pts + np.array(outer_padding) + tmp_crop_size = output_size + + return reference_5point + + +def get_affine_transform_matrix(src_pts, dst_pts): + """ + Function: + ---------- + get affine transform matrix 'tfm' from src_pts to dst_pts + Parameters: + ---------- + @src_pts: Kx2 np.array + source points matrix, each row is a pair of coordinates (x, y) + @dst_pts: Kx2 np.array + destination points matrix, each row is a pair of coordinates (x, y) + Returns: + ---------- + @tfm: 2x3 np.array + transform matrix from src_pts to dst_pts + """ + + tfm = np.float32([[1, 0, 0], [0, 1, 0]]) + n_pts = src_pts.shape[0] + ones = np.ones((n_pts, 1), src_pts.dtype) + src_pts_ = np.hstack([src_pts, ones]) + dst_pts_ = np.hstack([dst_pts, ones]) +
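+ # In homogeneous coordinates, lstsq solves src_pts_ @ A ~= dst_pts_ for the
+ # 3x3 matrix A; the first two columns of A, transposed, form the 2x3 affine
+ # matrix tfm built below (the rank checks guard degenerate point sets).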
A, res, rank, s = np.linalg.lstsq(src_pts_, dst_pts_, rcond=None) + + if rank == 3: + tfm = np.float32([[A[0, 0], A[1, 0], A[2, 0]], [A[0, 1], A[1, 1], A[2, 1]]]) + elif rank == 2: + tfm = np.float32([[A[0, 0], A[1, 0], 0], [A[0, 1], A[1, 1], 0]]) + + return tfm + + +def warp_and_crop_face(src_img, facial_pts, reference_pts=None, crop_size=(96, 112), align_type='similarity'): + """ + Function: + ---------- + warp and crop the face in src_img so that facial_pts align with reference_pts + Parameters: + ---------- + @src_img: HxWx3 np.array + input image + @facial_pts: could be + 1)a list of K coordinates (x,y) + or + 2) Kx2 or 2xK np.array + each row or col is a pair of coordinates (x, y) + @reference_pts: could be + 1) a list of K coordinates (x,y) + or + 2) Kx2 or 2xK np.array + each row or col is a pair of coordinates (x, y) + or + 3) None + if None, use default reference facial points + @crop_size: (w, h) + output face image size + @align_type: transform type, could be one of + 1) 'similarity': use similarity transform + 2) 'cv2_affine': use the first 3 points to do affine transform, + by calling cv2.getAffineTransform() + 3) 'affine': use all points to do affine transform + Returns: + ---------- + @face_img: output face image with size (w, h) = @crop_size + """ + + if reference_pts is None: + if crop_size[0] == 96 and crop_size[1] == 112: + reference_pts = REFERENCE_FACIAL_POINTS + else: + default_square = False + inner_padding_factor = 0 + outer_padding = (0, 0) + output_size = crop_size + + reference_pts = get_reference_facial_points(output_size, inner_padding_factor, outer_padding, + default_square) + + ref_pts = np.float32(reference_pts) + ref_pts_shp = ref_pts.shape + if max(ref_pts_shp) < 3 or min(ref_pts_shp) != 2: + raise FaceWarpException('reference_pts.shape must be (K,2) or (2,K) and K>2') + + if ref_pts_shp[0] == 2: + ref_pts = ref_pts.T + + src_pts = np.float32(facial_pts) + src_pts_shp = src_pts.shape + if max(src_pts_shp) < 3 or min(src_pts_shp) != 2: + raise FaceWarpException('facial_pts.shape must be (K,2) or (2,K) and K>2') + + if src_pts_shp[0] == 2: + src_pts = src_pts.T + + if src_pts.shape != ref_pts.shape: + raise FaceWarpException('facial_pts and reference_pts must have the same shape') + + if align_type == 'cv2_affine': + tfm = cv2.getAffineTransform(src_pts[0:3], ref_pts[0:3]) + elif align_type == 'affine': + tfm = get_affine_transform_matrix(src_pts, ref_pts) + else: + tfm = get_similarity_transform_for_cv2(src_pts, ref_pts) + + face_img = cv2.warpAffine(src_img, tfm, (crop_size[0], crop_size[1])) + + return face_img diff --git a/extras/facexlib/detection/matlab_cp2tform.py b/extras/facexlib/detection/matlab_cp2tform.py new file mode 100644 index 0000000000000000000000000000000000000000..b2a8b54a91709c71437e15c68d3be9a9b0a20a34 --- /dev/null +++ b/extras/facexlib/detection/matlab_cp2tform.py @@ -0,0 +1,317 @@ +import numpy as np +from numpy.linalg import inv, lstsq +from numpy.linalg import matrix_rank as rank +from numpy.linalg import norm + + +class MatlabCp2tformException(Exception): + + def __str__(self): + return 'In File {}:{}'.format(__file__, super().__str__()) + + +def tformfwd(trans, uv): + """ + Function: + ---------- + apply affine transform 'trans' to uv + + Parameters: + ---------- + @trans: 3x3 np.array + transform matrix + @uv: Kx2 np.array + each row is a pair of coordinates (x, y) + + Returns: + ---------- + @xy: Kx2 np.array + each row is a pair of transformed coordinates (x, y) + """ + uv = np.hstack((uv, np.ones((uv.shape[0], 1)))) + xy = np.dot(uv, trans) + xy = xy[:, 0:-1] + return xy + +
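+ # A minimal sketch of tformfwd, assuming a pure-translation transform:
+ # trans = np.array([[1., 0., 0.], [0., 1., 0.], [2., 3., 1.]])
+ # tformfwd(trans, np.array([[0., 0.], [1., 1.]])) -> [[2., 3.], [3., 4.]]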
+def tforminv(trans, uv): + """ + Function: + ---------- + apply the inverse of affine transform 'trans' to uv + + Parameters: + ---------- + @trans: 3x3 np.array + transform matrix + @uv: Kx2 np.array + each row is a pair of coordinates (x, y) + + Returns: + ---------- + @xy: Kx2 np.array + each row is a pair of inverse-transformed coordinates (x, y) + """ + Tinv = inv(trans) + xy = tformfwd(Tinv, uv) + return xy + + +def findNonreflectiveSimilarity(uv, xy, options=None): + if options is None: + options = {'K': 2} + + K = options['K'] + M = xy.shape[0] + x = xy[:, 0].reshape((-1, 1)) # use reshape to keep a column vector + y = xy[:, 1].reshape((-1, 1)) # use reshape to keep a column vector + + tmp1 = np.hstack((x, y, np.ones((M, 1)), np.zeros((M, 1)))) + tmp2 = np.hstack((y, -x, np.zeros((M, 1)), np.ones((M, 1)))) + X = np.vstack((tmp1, tmp2)) + + u = uv[:, 0].reshape((-1, 1)) # use reshape to keep a column vector + v = uv[:, 1].reshape((-1, 1)) # use reshape to keep a column vector + U = np.vstack((u, v)) + + # We know that X * r = U + if rank(X) >= 2 * K: + r, _, _, _ = lstsq(X, U, rcond=-1) + r = np.squeeze(r) + else: + raise Exception('cp2tform:twoUniquePointsReq') + sc = r[0] + ss = r[1] + tx = r[2] + ty = r[3] + + Tinv = np.array([[sc, -ss, 0], [ss, sc, 0], [tx, ty, 1]]) + T = inv(Tinv) + T[:, 2] = np.array([0, 0, 1]) + + return T, Tinv + + +def findSimilarity(uv, xy, options=None): + if options is None: + options = {'K': 2} + + # uv = np.array(uv) + # xy = np.array(xy) + + # Solve for trans1 + trans1, trans1_inv = findNonreflectiveSimilarity(uv, xy, options) + + # Solve for trans2 + + # manually reflect the xy data across the Y-axis (on a copy, so the + # caller's array is not mutated) + xyR = xy.copy() + xyR[:, 0] = -1 * xyR[:, 0] + + trans2r, trans2r_inv = findNonreflectiveSimilarity(uv, xyR, options) + + # manually reflect the tform to undo the reflection done on xyR + TreflectY = np.array([[-1, 0, 0], [0, 1, 0], [0, 0, 1]]) + + trans2 = np.dot(trans2r, TreflectY) + + # Figure out if trans1 or trans2 is better + xy1 = tformfwd(trans1, uv) + norm1 = norm(xy1 - xy) + + xy2 = tformfwd(trans2, uv) + norm2 = norm(xy2 - xy) + + if norm1 <= norm2: + return trans1, trans1_inv + else: + trans2_inv = inv(trans2) + return trans2, trans2_inv + + +def get_similarity_transform(src_pts, dst_pts, reflective=True): + """ + Function: + ---------- + Find Similarity Transform Matrix 'trans': + u = src_pts[:, 0] + v = src_pts[:, 1] + x = dst_pts[:, 0] + y = dst_pts[:, 1] + [x, y, 1] = [u, v, 1] * trans + + Parameters: + ---------- + @src_pts: Kx2 np.array + source points, each row is a pair of coordinates (x, y) + @dst_pts: Kx2 np.array + destination points, each row is a pair of transformed + coordinates (x, y) + @reflective: True or False + if True: + use reflective similarity transform + else: + use non-reflective similarity transform + + Returns: + ---------- + @trans: 3x3 np.array + transform matrix from uv to xy + trans_inv: 3x3 np.array + inverse of trans, transform matrix from xy to uv + """ + + if reflective: + trans, trans_inv = findSimilarity(src_pts, dst_pts) + else: + trans, trans_inv = findNonreflectiveSimilarity(src_pts, dst_pts) + + return trans, trans_inv + + +def cvt_tform_mat_for_cv2(trans): + """ + Function: + ---------- + Convert Transform Matrix 'trans' into 'cv2_trans' which could be + directly used by cv2.warpAffine(): + u = src_pts[:, 0] + v = src_pts[:, 1] + x = dst_pts[:, 0] + y = dst_pts[:, 1] + [x, y].T = cv_trans * [u, v, 1].T + + Parameters: + ---------- + @trans: 3x3 np.array + transform matrix from uv to xy + + Returns: + ---------- + @cv2_trans: 2x3 np.array +
transform matrix from src_pts to dst_pts, could be directly used + for cv2.warpAffine() + """ + cv2_trans = trans[:, 0:2].T + + return cv2_trans + + +def get_similarity_transform_for_cv2(src_pts, dst_pts, reflective=True): + """ + Function: + ---------- + Find Similarity Transform Matrix 'cv2_trans' which could be + directly used by cv2.warpAffine(): + u = src_pts[:, 0] + v = src_pts[:, 1] + x = dst_pts[:, 0] + y = dst_pts[:, 1] + [x, y].T = cv_trans * [u, v, 1].T + + Parameters: + ---------- + @src_pts: Kx2 np.array + source points, each row is a pair of coordinates (x, y) + @dst_pts: Kx2 np.array + destination points, each row is a pair of transformed + coordinates (x, y) + reflective: True or False + if True: + use reflective similarity transform + else: + use non-reflective similarity transform + + Returns: + ---------- + @cv2_trans: 2x3 np.array + transform matrix from src_pts to dst_pts, could be directly used + for cv2.warpAffine() + """ + trans, trans_inv = get_similarity_transform(src_pts, dst_pts, reflective) + cv2_trans = cvt_tform_mat_for_cv2(trans) + + return cv2_trans + + +if __name__ == '__main__': + """ + u = [0, 6, -2] + v = [0, 3, 5] + x = [-1, 0, 4] + y = [-1, -10, 4] + + # In Matlab, run: + # + # uv = [u'; v']; + # xy = [x'; y']; + # tform_sim=cp2tform(uv,xy,'similarity'); + # + # trans = tform_sim.tdata.T + # ans = + # -0.0764 -1.6190 0 + # 1.6190 -0.0764 0 + # -3.2156 0.0290 1.0000 + # trans_inv = tform_sim.tdata.Tinv + # ans = + # + # -0.0291 0.6163 0 + # -0.6163 -0.0291 0 + # -0.0756 1.9826 1.0000 + # xy_m=tformfwd(tform_sim, u,v) + # + # xy_m = + # + # -3.2156 0.0290 + # 1.1833 -9.9143 + # 5.0323 2.8853 + # uv_m=tforminv(tform_sim, x,y) + # + # uv_m = + # + # 0.5698 1.3953 + # 6.0872 2.2733 + # -2.6570 4.3314 + """ + u = [0, 6, -2] + v = [0, 3, 5] + x = [-1, 0, 4] + y = [-1, -10, 4] + + uv = np.array((u, v)).T + xy = np.array((x, y)).T + + print('\n--->uv:') + print(uv) + print('\n--->xy:') + print(xy) + + trans, trans_inv = get_similarity_transform(uv, xy) + + print('\n--->trans matrix:') + print(trans) + + print('\n--->trans_inv matrix:') + print(trans_inv) + + print('\n---> apply transform to uv') + print('\nxy_m = uv_augmented * trans') + uv_aug = np.hstack((uv, np.ones((uv.shape[0], 1)))) + xy_m = np.dot(uv_aug, trans) + print(xy_m) + + print('\nxy_m = tformfwd(trans, uv)') + xy_m = tformfwd(trans, uv) + print(xy_m) + + print('\n---> apply inverse transform to xy') + print('\nuv_m = xy_augmented * trans_inv') + xy_aug = np.hstack((xy, np.ones((xy.shape[0], 1)))) + uv_m = np.dot(xy_aug, trans_inv) + print(uv_m) + + print('\nuv_m = tformfwd(trans_inv, xy)') + uv_m = tformfwd(trans_inv, xy) + print(uv_m) + + uv_m = tforminv(trans, xy) + print('\nuv_m = tforminv(trans, xy)') + print(uv_m) diff --git a/extras/facexlib/detection/retinaface.py b/extras/facexlib/detection/retinaface.py new file mode 100644 index 0000000000000000000000000000000000000000..5e0b4f0a5962741915e38efabe07e4cf5a99bc3b --- /dev/null +++ b/extras/facexlib/detection/retinaface.py @@ -0,0 +1,366 @@ +import cv2 +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from PIL import Image +from torchvision.models._utils import IntermediateLayerGetter as IntermediateLayerGetter + +from extras.facexlib.detection.align_trans import get_reference_facial_points, warp_and_crop_face +from extras.facexlib.detection.retinaface_net import FPN, SSH, MobileNetV1, make_bbox_head, make_class_head, make_landmark_head +from extras.facexlib.detection.retinaface_utils import 
(PriorBox, batched_decode, batched_decode_landm, decode, decode_landm, + py_cpu_nms) + + +def generate_config(network_name): + + cfg_mnet = { + 'name': 'mobilenet0.25', + 'min_sizes': [[16, 32], [64, 128], [256, 512]], + 'steps': [8, 16, 32], + 'variance': [0.1, 0.2], + 'clip': False, + 'loc_weight': 2.0, + 'gpu_train': True, + 'batch_size': 32, + 'ngpu': 1, + 'epoch': 250, + 'decay1': 190, + 'decay2': 220, + 'image_size': 640, + 'return_layers': { + 'stage1': 1, + 'stage2': 2, + 'stage3': 3 + }, + 'in_channel': 32, + 'out_channel': 64 + } + + cfg_re50 = { + 'name': 'Resnet50', + 'min_sizes': [[16, 32], [64, 128], [256, 512]], + 'steps': [8, 16, 32], + 'variance': [0.1, 0.2], + 'clip': False, + 'loc_weight': 2.0, + 'gpu_train': True, + 'batch_size': 24, + 'ngpu': 4, + 'epoch': 100, + 'decay1': 70, + 'decay2': 90, + 'image_size': 840, + 'return_layers': { + 'layer2': 1, + 'layer3': 2, + 'layer4': 3 + }, + 'in_channel': 256, + 'out_channel': 256 + } + + if network_name == 'mobile0.25': + return cfg_mnet + elif network_name == 'resnet50': + return cfg_re50 + else: + raise NotImplementedError(f'network_name={network_name}') + + +class RetinaFace(nn.Module): + + def __init__(self, network_name='resnet50', half=False, phase='test', device=None): + self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') if device is None else device + + super(RetinaFace, self).__init__() + self.half_inference = half + cfg = generate_config(network_name) + self.backbone = cfg['name'] + + self.model_name = f'retinaface_{network_name}' + self.cfg = cfg + self.phase = phase + self.target_size, self.max_size = 1600, 2150 + self.resize, self.scale, self.scale1 = 1., None, None + self.mean_tensor = torch.tensor([[[[104.]], [[117.]], [[123.]]]], device=self.device) + self.reference = get_reference_facial_points(default_square=True) + # Build network. 
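+ # The backbone below is deliberately built without pretrained weights:
+ # init_detection_model in this package loads the full detector checkpoint
+ # (backbone included) afterwards, so downloading ImageNet weights here
+ # would be redundant.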
+ backbone = None + if cfg['name'] == 'mobilenet0.25': + backbone = MobileNetV1() + self.body = IntermediateLayerGetter(backbone, cfg['return_layers']) + elif cfg['name'] == 'Resnet50': + import torchvision.models as models + backbone = models.resnet50(weights=None) + self.body = IntermediateLayerGetter(backbone, cfg['return_layers']) + + in_channels_stage2 = cfg['in_channel'] + in_channels_list = [ + in_channels_stage2 * 2, + in_channels_stage2 * 4, + in_channels_stage2 * 8, + ] + + out_channels = cfg['out_channel'] + self.fpn = FPN(in_channels_list, out_channels) + self.ssh1 = SSH(out_channels, out_channels) + self.ssh2 = SSH(out_channels, out_channels) + self.ssh3 = SSH(out_channels, out_channels) + + self.ClassHead = make_class_head(fpn_num=3, inchannels=cfg['out_channel']) + self.BboxHead = make_bbox_head(fpn_num=3, inchannels=cfg['out_channel']) + self.LandmarkHead = make_landmark_head(fpn_num=3, inchannels=cfg['out_channel']) + + self.to(self.device) + self.eval() + if self.half_inference: + self.half() + + def forward(self, inputs): + out = self.body(inputs) + + if self.backbone == 'mobilenet0.25' or self.backbone == 'Resnet50': + out = list(out.values()) + # FPN + fpn = self.fpn(out) + + # SSH + feature1 = self.ssh1(fpn[0]) + feature2 = self.ssh2(fpn[1]) + feature3 = self.ssh3(fpn[2]) + features = [feature1, feature2, feature3] + + bbox_regressions = torch.cat([self.BboxHead[i](feature) for i, feature in enumerate(features)], dim=1) + classifications = torch.cat([self.ClassHead[i](feature) for i, feature in enumerate(features)], dim=1) + tmp = [self.LandmarkHead[i](feature) for i, feature in enumerate(features)] + ldm_regressions = (torch.cat(tmp, dim=1)) + + if self.phase == 'train': + output = (bbox_regressions, classifications, ldm_regressions) + else: + output = (bbox_regressions, F.softmax(classifications, dim=-1), ldm_regressions) + return output + + def __detect_faces(self, inputs): + # get scale + height, width = inputs.shape[2:] + self.scale = torch.tensor([width, height, width, height], dtype=torch.float32, device=self.device) + tmp = [width, height, width, height, width, height, width, height, width, height] + self.scale1 = torch.tensor(tmp, dtype=torch.float32, device=self.device) + + # forward + inputs = inputs.to(self.device) + if self.half_inference: + inputs = inputs.half() + loc, conf, landmarks = self(inputs) + + # get priorbox + priorbox = PriorBox(self.cfg, image_size=inputs.shape[2:]) + priors = priorbox.forward().to(self.device) + + return loc, conf, landmarks, priors + + # single image detection + def transform(self, image, use_origin_size): + # convert to opencv format + if isinstance(image, Image.Image): + image = cv2.cvtColor(np.asarray(image), cv2.COLOR_RGB2BGR) + image = image.astype(np.float32) + + # testing scale + im_size_min = np.min(image.shape[0:2]) + im_size_max = np.max(image.shape[0:2]) + resize = float(self.target_size) / float(im_size_min) + + # prevent bigger axis from being more than max_size + if np.round(resize * im_size_max) > self.max_size: + resize = float(self.max_size) / float(im_size_max) + resize = 1 if use_origin_size else resize + + # resize + if resize != 1: + image = cv2.resize(image, None, None, fx=resize, fy=resize, interpolation=cv2.INTER_LINEAR) + + # convert to torch.tensor format + # image -= (104, 117, 123) + image = image.transpose(2, 0, 1) + image = torch.from_numpy(image).unsqueeze(0) + + return image, resize + + def detect_faces( + self, + image, + conf_threshold=0.8, + nms_threshold=0.4, + use_origin_size=True, + ):
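+ # Returns an (n_faces, 15) np.ndarray: columns are
+ # [x1, y1, x2, y2, score, lm1_x, lm1_y, ..., lm5_x, lm5_y].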
+ image, self.resize = self.transform(image, use_origin_size) + image = image.to(self.device) + if self.half_inference: + image = image.half() + image = image - self.mean_tensor + + loc, conf, landmarks, priors = self.__detect_faces(image) + + boxes = decode(loc.data.squeeze(0), priors.data, self.cfg['variance']) + boxes = boxes * self.scale / self.resize + boxes = boxes.cpu().numpy() + + scores = conf.squeeze(0).data.cpu().numpy()[:, 1] + + landmarks = decode_landm(landmarks.squeeze(0), priors, self.cfg['variance']) + landmarks = landmarks * self.scale1 / self.resize + landmarks = landmarks.cpu().numpy() + + # ignore low scores + inds = np.where(scores > conf_threshold)[0] + boxes, landmarks, scores = boxes[inds], landmarks[inds], scores[inds] + + # sort + order = scores.argsort()[::-1] + boxes, landmarks, scores = boxes[order], landmarks[order], scores[order] + + # do NMS + bounding_boxes = np.hstack((boxes, scores[:, np.newaxis])).astype(np.float32, copy=False) + keep = py_cpu_nms(bounding_boxes, nms_threshold) + bounding_boxes, landmarks = bounding_boxes[keep, :], landmarks[keep] + # self.t['forward_pass'].toc() + # print(self.t['forward_pass'].average_time) + # import sys + # sys.stdout.flush() + return np.concatenate((bounding_boxes, landmarks), axis=1) + + def __align_multi(self, image, boxes, landmarks, limit=None): + + if len(boxes) < 1: + return [], [] + + if limit: + boxes = boxes[:limit] + landmarks = landmarks[:limit] + + faces = [] + for landmark in landmarks: + facial5points = [[landmark[2 * j], landmark[2 * j + 1]] for j in range(5)] + + warped_face = warp_and_crop_face(np.array(image), facial5points, self.reference, crop_size=(112, 112)) + faces.append(warped_face) + + return np.concatenate((boxes, landmarks), axis=1), faces + + def align_multi(self, img, conf_threshold=0.8, limit=None): + + rlt = self.detect_faces(img, conf_threshold=conf_threshold) + boxes, landmarks = rlt[:, 0:5], rlt[:, 5:] + + return self.__align_multi(img, boxes, landmarks, limit) + + # batched detection + def batched_transform(self, frames, use_origin_size): + """ + Arguments: + frames: a list of PIL.Image, or torch.Tensor(shape=[n, h, w, c], + type=np.float32, BGR format). + use_origin_size: whether to use origin size. 
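+ Returns: + frames: torch.Tensor of shape [n, c, h, w] (float32, BGR, + not yet mean-subtracted), and resize: the float scale + factor that was applied.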
+ """ + from_PIL = True if isinstance(frames[0], Image.Image) else False + + # convert to opencv format + if from_PIL: + frames = [cv2.cvtColor(np.asarray(frame), cv2.COLOR_RGB2BGR) for frame in frames] + frames = np.asarray(frames, dtype=np.float32) + + # testing scale + im_size_min = np.min(frames[0].shape[0:2]) + im_size_max = np.max(frames[0].shape[0:2]) + resize = float(self.target_size) / float(im_size_min) + + # prevent bigger axis from being more than max_size + if np.round(resize * im_size_max) > self.max_size: + resize = float(self.max_size) / float(im_size_max) + resize = 1 if use_origin_size else resize + + # resize + if resize != 1: + if not from_PIL: + frames = F.interpolate(frames, scale_factor=resize) + else: + frames = [ + cv2.resize(frame, None, None, fx=resize, fy=resize, interpolation=cv2.INTER_LINEAR) + for frame in frames + ] + + # convert to torch.tensor format + if not from_PIL: + frames = frames.transpose(1, 2).transpose(1, 3).contiguous() + else: + frames = frames.transpose((0, 3, 1, 2)) + frames = torch.from_numpy(frames) + + return frames, resize + + def batched_detect_faces(self, frames, conf_threshold=0.8, nms_threshold=0.4, use_origin_size=True): + """ + Arguments: + frames: a list of PIL.Image, or np.array(shape=[n, h, w, c], + type=np.uint8, BGR format). + conf_threshold: confidence threshold. + nms_threshold: nms threshold. + use_origin_size: whether to use origin size. + Returns: + final_bounding_boxes: list of np.array ([n_boxes, 5], + type=np.float32). + final_landmarks: list of np.array ([n_boxes, 10], type=np.float32). + """ + # self.t['forward_pass'].tic() + frames, self.resize = self.batched_transform(frames, use_origin_size) + frames = frames.to(self.device) + frames = frames - self.mean_tensor + + b_loc, b_conf, b_landmarks, priors = self.__detect_faces(frames) + + final_bounding_boxes, final_landmarks = [], [] + + # decode + priors = priors.unsqueeze(0) + b_loc = batched_decode(b_loc, priors, self.cfg['variance']) * self.scale / self.resize + b_landmarks = batched_decode_landm(b_landmarks, priors, self.cfg['variance']) * self.scale1 / self.resize + b_conf = b_conf[:, :, 1] + + # index for selection + b_indice = b_conf > conf_threshold + + # concat + b_loc_and_conf = torch.cat((b_loc, b_conf.unsqueeze(-1)), dim=2).float() + + for pred, landm, inds in zip(b_loc_and_conf, b_landmarks, b_indice): + + # ignore low scores + pred, landm = pred[inds, :], landm[inds, :] + if pred.shape[0] == 0: + final_bounding_boxes.append(np.array([], dtype=np.float32)) + final_landmarks.append(np.array([], dtype=np.float32)) + continue + + # sort + # order = score.argsort(descending=True) + # box, landm, score = box[order], landm[order], score[order] + + # to CPU + bounding_boxes, landm = pred.cpu().numpy(), landm.cpu().numpy() + + # NMS + keep = py_cpu_nms(bounding_boxes, nms_threshold) + bounding_boxes, landmarks = bounding_boxes[keep, :], landm[keep] + + # append + final_bounding_boxes.append(bounding_boxes) + final_landmarks.append(landmarks) + # self.t['forward_pass'].toc(average=True) + # self.batch_time += self.t['forward_pass'].diff + # self.total_frame += len(frames) + # print(self.batch_time / self.total_frame) + + return final_bounding_boxes, final_landmarks diff --git a/extras/facexlib/detection/retinaface_net.py b/extras/facexlib/detection/retinaface_net.py new file mode 100644 index 0000000000000000000000000000000000000000..ab6aa82d3e9055a838f1f9076b12f05fdfc154d0 --- /dev/null +++ b/extras/facexlib/detection/retinaface_net.py @@ -0,0 +1,196 @@ +import 
torch +import torch.nn as nn +import torch.nn.functional as F + + +def conv_bn(inp, oup, stride=1, leaky=0): + return nn.Sequential( + nn.Conv2d(inp, oup, 3, stride, 1, bias=False), nn.BatchNorm2d(oup), + nn.LeakyReLU(negative_slope=leaky, inplace=True)) + + +def conv_bn_no_relu(inp, oup, stride): + return nn.Sequential( + nn.Conv2d(inp, oup, 3, stride, 1, bias=False), + nn.BatchNorm2d(oup), + ) + + +def conv_bn1X1(inp, oup, stride, leaky=0): + return nn.Sequential( + nn.Conv2d(inp, oup, 1, stride, padding=0, bias=False), nn.BatchNorm2d(oup), + nn.LeakyReLU(negative_slope=leaky, inplace=True)) + + +def conv_dw(inp, oup, stride, leaky=0.1): + return nn.Sequential( + nn.Conv2d(inp, inp, 3, stride, 1, groups=inp, bias=False), + nn.BatchNorm2d(inp), + nn.LeakyReLU(negative_slope=leaky, inplace=True), + nn.Conv2d(inp, oup, 1, 1, 0, bias=False), + nn.BatchNorm2d(oup), + nn.LeakyReLU(negative_slope=leaky, inplace=True), + ) + + +class SSH(nn.Module): + + def __init__(self, in_channel, out_channel): + super(SSH, self).__init__() + assert out_channel % 4 == 0 + leaky = 0 + if (out_channel <= 64): + leaky = 0.1 + self.conv3X3 = conv_bn_no_relu(in_channel, out_channel // 2, stride=1) + + self.conv5X5_1 = conv_bn(in_channel, out_channel // 4, stride=1, leaky=leaky) + self.conv5X5_2 = conv_bn_no_relu(out_channel // 4, out_channel // 4, stride=1) + + self.conv7X7_2 = conv_bn(out_channel // 4, out_channel // 4, stride=1, leaky=leaky) + self.conv7x7_3 = conv_bn_no_relu(out_channel // 4, out_channel // 4, stride=1) + + def forward(self, input): + conv3X3 = self.conv3X3(input) + + conv5X5_1 = self.conv5X5_1(input) + conv5X5 = self.conv5X5_2(conv5X5_1) + + conv7X7_2 = self.conv7X7_2(conv5X5_1) + conv7X7 = self.conv7x7_3(conv7X7_2) + + out = torch.cat([conv3X3, conv5X5, conv7X7], dim=1) + out = F.relu(out) + return out + + +class FPN(nn.Module): + + def __init__(self, in_channels_list, out_channels): + super(FPN, self).__init__() + leaky = 0 + if (out_channels <= 64): + leaky = 0.1 + self.output1 = conv_bn1X1(in_channels_list[0], out_channels, stride=1, leaky=leaky) + self.output2 = conv_bn1X1(in_channels_list[1], out_channels, stride=1, leaky=leaky) + self.output3 = conv_bn1X1(in_channels_list[2], out_channels, stride=1, leaky=leaky) + + self.merge1 = conv_bn(out_channels, out_channels, leaky=leaky) + self.merge2 = conv_bn(out_channels, out_channels, leaky=leaky) + + def forward(self, input): + # names = list(input.keys()) + # input = list(input.values()) + + output1 = self.output1(input[0]) + output2 = self.output2(input[1]) + output3 = self.output3(input[2]) + + up3 = F.interpolate(output3, size=[output2.size(2), output2.size(3)], mode='nearest') + output2 = output2 + up3 + output2 = self.merge2(output2) + + up2 = F.interpolate(output2, size=[output1.size(2), output1.size(3)], mode='nearest') + output1 = output1 + up2 + output1 = self.merge1(output1) + + out = [output1, output2, output3] + return out + + +class MobileNetV1(nn.Module): + + def __init__(self): + super(MobileNetV1, self).__init__() + self.stage1 = nn.Sequential( + conv_bn(3, 8, 2, leaky=0.1), # 3 + conv_dw(8, 16, 1), # 7 + conv_dw(16, 32, 2), # 11 + conv_dw(32, 32, 1), # 19 + conv_dw(32, 64, 2), # 27 + conv_dw(64, 64, 1), # 43 + ) + self.stage2 = nn.Sequential( + conv_dw(64, 128, 2), # 43 + 16 = 59 + conv_dw(128, 128, 1), # 59 + 32 = 91 + conv_dw(128, 128, 1), # 91 + 32 = 123 + conv_dw(128, 128, 1), # 123 + 32 = 155 + conv_dw(128, 128, 1), # 155 + 32 = 187 + conv_dw(128, 128, 1), # 187 + 32 = 219 + ) + self.stage3 = nn.Sequential( + 
conv_dw(128, 256, 2), # 219 +3 2 = 241 + conv_dw(256, 256, 1), # 241 + 64 = 301 + ) + self.avg = nn.AdaptiveAvgPool2d((1, 1)) + self.fc = nn.Linear(256, 1000) + + def forward(self, x): + x = self.stage1(x) + x = self.stage2(x) + x = self.stage3(x) + x = self.avg(x) + # x = self.model(x) + x = x.view(-1, 256) + x = self.fc(x) + return x + + +class ClassHead(nn.Module): + + def __init__(self, inchannels=512, num_anchors=3): + super(ClassHead, self).__init__() + self.num_anchors = num_anchors + self.conv1x1 = nn.Conv2d(inchannels, self.num_anchors * 2, kernel_size=(1, 1), stride=1, padding=0) + + def forward(self, x): + out = self.conv1x1(x) + out = out.permute(0, 2, 3, 1).contiguous() + + return out.view(out.shape[0], -1, 2) + + +class BboxHead(nn.Module): + + def __init__(self, inchannels=512, num_anchors=3): + super(BboxHead, self).__init__() + self.conv1x1 = nn.Conv2d(inchannels, num_anchors * 4, kernel_size=(1, 1), stride=1, padding=0) + + def forward(self, x): + out = self.conv1x1(x) + out = out.permute(0, 2, 3, 1).contiguous() + + return out.view(out.shape[0], -1, 4) + + +class LandmarkHead(nn.Module): + + def __init__(self, inchannels=512, num_anchors=3): + super(LandmarkHead, self).__init__() + self.conv1x1 = nn.Conv2d(inchannels, num_anchors * 10, kernel_size=(1, 1), stride=1, padding=0) + + def forward(self, x): + out = self.conv1x1(x) + out = out.permute(0, 2, 3, 1).contiguous() + + return out.view(out.shape[0], -1, 10) + + +def make_class_head(fpn_num=3, inchannels=64, anchor_num=2): + classhead = nn.ModuleList() + for i in range(fpn_num): + classhead.append(ClassHead(inchannels, anchor_num)) + return classhead + + +def make_bbox_head(fpn_num=3, inchannels=64, anchor_num=2): + bboxhead = nn.ModuleList() + for i in range(fpn_num): + bboxhead.append(BboxHead(inchannels, anchor_num)) + return bboxhead + + +def make_landmark_head(fpn_num=3, inchannels=64, anchor_num=2): + landmarkhead = nn.ModuleList() + for i in range(fpn_num): + landmarkhead.append(LandmarkHead(inchannels, anchor_num)) + return landmarkhead diff --git a/extras/facexlib/detection/retinaface_utils.py b/extras/facexlib/detection/retinaface_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..8c357757741c6d9bd7ce4d8ce740fefd51850fbf --- /dev/null +++ b/extras/facexlib/detection/retinaface_utils.py @@ -0,0 +1,421 @@ +import numpy as np +import torch +import torchvision +from itertools import product as product +from math import ceil + + +class PriorBox(object): + + def __init__(self, cfg, image_size=None, phase='train'): + super(PriorBox, self).__init__() + self.min_sizes = cfg['min_sizes'] + self.steps = cfg['steps'] + self.clip = cfg['clip'] + self.image_size = image_size + self.feature_maps = [[ceil(self.image_size[0] / step), ceil(self.image_size[1] / step)] for step in self.steps] + self.name = 's' + + def forward(self): + anchors = [] + for k, f in enumerate(self.feature_maps): + min_sizes = self.min_sizes[k] + for i, j in product(range(f[0]), range(f[1])): + for min_size in min_sizes: + s_kx = min_size / self.image_size[1] + s_ky = min_size / self.image_size[0] + dense_cx = [x * self.steps[k] / self.image_size[1] for x in [j + 0.5]] + dense_cy = [y * self.steps[k] / self.image_size[0] for y in [i + 0.5]] + for cy, cx in product(dense_cy, dense_cx): + anchors += [cx, cy, s_kx, s_ky] + + # back to torch land + output = torch.Tensor(anchors).view(-1, 4) + if self.clip: + output.clamp_(max=1, min=0) + return output + + +def py_cpu_nms(dets, thresh): + """Pure Python NMS baseline.""" + keep = 
torchvision.ops.nms( + boxes=torch.Tensor(dets[:, :4]), + scores=torch.Tensor(dets[:, 4]), + iou_threshold=thresh, + ) + + return list(keep) + + +def point_form(boxes): + """ Convert prior_boxes to (xmin, ymin, xmax, ymax) + representation for comparison to point form ground truth data. + Args: + boxes: (tensor) center-size default boxes from priorbox layers. + Return: + boxes: (tensor) Converted xmin, ymin, xmax, ymax form of boxes. + """ + return torch.cat( + ( + boxes[:, :2] - boxes[:, 2:] / 2, # xmin, ymin + boxes[:, :2] + boxes[:, 2:] / 2), + 1) # xmax, ymax + + +def center_size(boxes): + """ Convert prior_boxes to (cx, cy, w, h) + representation for comparison to center-size form ground truth data. + Args: + boxes: (tensor) point_form boxes + Return: + boxes: (tensor) Converted cx, cy, w, h form of boxes. + """ + return torch.cat( + ((boxes[:, 2:] + boxes[:, :2]) / 2, # cx, cy + boxes[:, 2:] - boxes[:, :2]), + 1) # w, h + + +def intersect(box_a, box_b): + """ We resize both tensors to [A,B,2] without new malloc: + [A,2] -> [A,1,2] -> [A,B,2] + [B,2] -> [1,B,2] -> [A,B,2] + Then we compute the area of intersect between box_a and box_b. + Args: + box_a: (tensor) bounding boxes, Shape: [A,4]. + box_b: (tensor) bounding boxes, Shape: [B,4]. + Return: + (tensor) intersection area, Shape: [A,B]. + """ + A = box_a.size(0) + B = box_b.size(0) + max_xy = torch.min(box_a[:, 2:].unsqueeze(1).expand(A, B, 2), box_b[:, 2:].unsqueeze(0).expand(A, B, 2)) + min_xy = torch.max(box_a[:, :2].unsqueeze(1).expand(A, B, 2), box_b[:, :2].unsqueeze(0).expand(A, B, 2)) + inter = torch.clamp((max_xy - min_xy), min=0) + return inter[:, :, 0] * inter[:, :, 1] + + +def jaccard(box_a, box_b): + """Compute the jaccard overlap of two sets of boxes. The jaccard overlap + is simply the intersection over union of two boxes. Here we operate on + ground truth boxes and default boxes.
+ E.g.: + A ∩ B / A ∪ B = A ∩ B / (area(A) + area(B) - A ∩ B) + Args: + box_a: (tensor) Ground truth bounding boxes, Shape: [num_objects,4] + box_b: (tensor) Prior boxes from priorbox layers, Shape: [num_priors,4] + Return: + jaccard overlap: (tensor) Shape: [box_a.size(0), box_b.size(0)] + """ + inter = intersect(box_a, box_b) + area_a = ((box_a[:, 2] - box_a[:, 0]) * (box_a[:, 3] - box_a[:, 1])).unsqueeze(1).expand_as(inter) # [A,B] + area_b = ((box_b[:, 2] - box_b[:, 0]) * (box_b[:, 3] - box_b[:, 1])).unsqueeze(0).expand_as(inter) # [A,B] + union = area_a + area_b - inter + return inter / union # [A,B] + + +def matrix_iou(a, b): + """ + return iou of a and b, numpy version for data augmentation + """ + lt = np.maximum(a[:, np.newaxis, :2], b[:, :2]) + rb = np.minimum(a[:, np.newaxis, 2:], b[:, 2:]) + + area_i = np.prod(rb - lt, axis=2) * (lt < rb).all(axis=2) + area_a = np.prod(a[:, 2:] - a[:, :2], axis=1) + area_b = np.prod(b[:, 2:] - b[:, :2], axis=1) + return area_i / (area_a[:, np.newaxis] + area_b - area_i) + + +def matrix_iof(a, b): + """ + return iof of a and b, numpy version for data augmentation + """ + lt = np.maximum(a[:, np.newaxis, :2], b[:, :2]) + rb = np.minimum(a[:, np.newaxis, 2:], b[:, 2:]) + + area_i = np.prod(rb - lt, axis=2) * (lt < rb).all(axis=2) + area_a = np.prod(a[:, 2:] - a[:, :2], axis=1) + return area_i / np.maximum(area_a[:, np.newaxis], 1) + + +def match(threshold, truths, priors, variances, labels, landms, loc_t, conf_t, landm_t, idx): + """Match each prior box with the ground truth box of the highest jaccard + overlap, encode the bounding boxes, then return the matched indices + corresponding to both confidence and location preds. + Args: + threshold: (float) The overlap threshold used when matching boxes. + truths: (tensor) Ground truth boxes, Shape: [num_obj, 4]. + priors: (tensor) Prior boxes from priorbox layers, Shape: [n_priors,4]. + variances: (tensor) Variances corresponding to each prior coord, + Shape: [num_priors, 4]. + labels: (tensor) All the class labels for the image, Shape: [num_obj]. + landms: (tensor) Ground truth landms, Shape [num_obj, 10]. + loc_t: (tensor) Tensor to be filled w/ encoded location targets. + conf_t: (tensor) Tensor to be filled w/ matched indices for conf preds. + landm_t: (tensor) Tensor to be filled w/ encoded landm targets. + idx: (int) current batch index + Return: + The matched indices corresponding to 1)location 2)confidence + 3)landm preds.
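+ Note: the encoded targets are written into loc_t, conf_t and landm_t + in place at batch index idx; the function itself returns nothing.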
+ """ + # jaccard index + overlaps = jaccard(truths, point_form(priors)) + # (Bipartite Matching) + # [1,num_objects] best prior for each ground truth + best_prior_overlap, best_prior_idx = overlaps.max(1, keepdim=True) + + # ignore hard gt + valid_gt_idx = best_prior_overlap[:, 0] >= 0.2 + best_prior_idx_filter = best_prior_idx[valid_gt_idx, :] + if best_prior_idx_filter.shape[0] <= 0: + loc_t[idx] = 0 + conf_t[idx] = 0 + return + + # [1,num_priors] best ground truth for each prior + best_truth_overlap, best_truth_idx = overlaps.max(0, keepdim=True) + best_truth_idx.squeeze_(0) + best_truth_overlap.squeeze_(0) + best_prior_idx.squeeze_(1) + best_prior_idx_filter.squeeze_(1) + best_prior_overlap.squeeze_(1) + best_truth_overlap.index_fill_(0, best_prior_idx_filter, 2) # ensure best prior + # TODO refactor: index best_prior_idx with long tensor + # ensure every gt matches with its prior of max overlap + for j in range(best_prior_idx.size(0)): # 判别此anchor是预测哪一个boxes + best_truth_idx[best_prior_idx[j]] = j + matches = truths[best_truth_idx] # Shape: [num_priors,4] 此处为每一个anchor对应的bbox取出来 + conf = labels[best_truth_idx] # Shape: [num_priors] 此处为每一个anchor对应的label取出来 + conf[best_truth_overlap < threshold] = 0 # label as background overlap<0.35的全部作为负样本 + loc = encode(matches, priors, variances) + + matches_landm = landms[best_truth_idx] + landm = encode_landm(matches_landm, priors, variances) + loc_t[idx] = loc # [num_priors,4] encoded offsets to learn + conf_t[idx] = conf # [num_priors] top class label for each prior + landm_t[idx] = landm + + +def encode(matched, priors, variances): + """Encode the variances from the priorbox layers into the ground truth boxes + we have matched (based on jaccard overlap) with the prior boxes. + Args: + matched: (tensor) Coords of ground truth for each prior in point-form + Shape: [num_priors, 4]. + priors: (tensor) Prior boxes in center-offset form + Shape: [num_priors,4]. + variances: (list[float]) Variances of priorboxes + Return: + encoded boxes (tensor), Shape: [num_priors, 4] + """ + + # dist b/t match center and prior's center + g_cxcy = (matched[:, :2] + matched[:, 2:]) / 2 - priors[:, :2] + # encode variance + g_cxcy /= (variances[0] * priors[:, 2:]) + # match wh / prior wh + g_wh = (matched[:, 2:] - matched[:, :2]) / priors[:, 2:] + g_wh = torch.log(g_wh) / variances[1] + # return target for smooth_l1_loss + return torch.cat([g_cxcy, g_wh], 1) # [num_priors,4] + + +def encode_landm(matched, priors, variances): + """Encode the variances from the priorbox layers into the ground truth boxes + we have matched (based on jaccard overlap) with the prior boxes. + Args: + matched: (tensor) Coords of ground truth for each prior in point-form + Shape: [num_priors, 10]. + priors: (tensor) Prior boxes in center-offset form + Shape: [num_priors,4]. 
+ variances: (list[float]) Variances of priorboxes + Return: + encoded landm (tensor), Shape: [num_priors, 10] + """ + + # dist b/t match center and prior's center + matched = torch.reshape(matched, (matched.size(0), 5, 2)) + priors_cx = priors[:, 0].unsqueeze(1).expand(matched.size(0), 5).unsqueeze(2) + priors_cy = priors[:, 1].unsqueeze(1).expand(matched.size(0), 5).unsqueeze(2) + priors_w = priors[:, 2].unsqueeze(1).expand(matched.size(0), 5).unsqueeze(2) + priors_h = priors[:, 3].unsqueeze(1).expand(matched.size(0), 5).unsqueeze(2) + priors = torch.cat([priors_cx, priors_cy, priors_w, priors_h], dim=2) + g_cxcy = matched[:, :, :2] - priors[:, :, :2] + # encode variance + g_cxcy /= (variances[0] * priors[:, :, 2:]) + # g_cxcy /= priors[:, :, 2:] + g_cxcy = g_cxcy.reshape(g_cxcy.size(0), -1) + # return target for smooth_l1_loss + return g_cxcy + + +# Adapted from https://github.com/Hakuyume/chainer-ssd +def decode(loc, priors, variances): + """Decode locations from predictions using priors to undo + the encoding we did for offset regression at train time. + Args: + loc (tensor): location predictions for loc layers, + Shape: [num_priors,4] + priors (tensor): Prior boxes in center-offset form. + Shape: [num_priors,4]. + variances: (list[float]) Variances of priorboxes + Return: + decoded bounding box predictions + """ + + boxes = torch.cat((priors[:, :2] + loc[:, :2] * variances[0] * priors[:, 2:], + priors[:, 2:] * torch.exp(loc[:, 2:] * variances[1])), 1) + boxes[:, :2] -= boxes[:, 2:] / 2 + boxes[:, 2:] += boxes[:, :2] + return boxes + + +def decode_landm(pre, priors, variances): + """Decode landm from predictions using priors to undo + the encoding we did for offset regression at train time. + Args: + pre (tensor): landm predictions for loc layers, + Shape: [num_priors,10] + priors (tensor): Prior boxes in center-offset form. + Shape: [num_priors,4]. + variances: (list[float]) Variances of priorboxes + Return: + decoded landm predictions + """ + tmp = ( + priors[:, :2] + pre[:, :2] * variances[0] * priors[:, 2:], + priors[:, :2] + pre[:, 2:4] * variances[0] * priors[:, 2:], + priors[:, :2] + pre[:, 4:6] * variances[0] * priors[:, 2:], + priors[:, :2] + pre[:, 6:8] * variances[0] * priors[:, 2:], + priors[:, :2] + pre[:, 8:10] * variances[0] * priors[:, 2:], + ) + landms = torch.cat(tmp, dim=1) + return landms + + +def batched_decode(b_loc, priors, variances): + """Decode locations from predictions using priors to undo + the encoding we did for offset regression at train time. + Args: + b_loc (tensor): location predictions for loc layers, + Shape: [num_batches,num_priors,4] + priors (tensor): Prior boxes in center-offset form. + Shape: [1,num_priors,4]. + variances: (list[float]) Variances of priorboxes + Return: + decoded bounding box predictions + """ + boxes = ( + priors[:, :, :2] + b_loc[:, :, :2] * variances[0] * priors[:, :, 2:], + priors[:, :, 2:] * torch.exp(b_loc[:, :, 2:] * variances[1]), + ) + boxes = torch.cat(boxes, dim=2) + + boxes[:, :, :2] -= boxes[:, :, 2:] / 2 + boxes[:, :, 2:] += boxes[:, :, :2] + return boxes + + +def batched_decode_landm(pre, priors, variances): + """Decode landm from predictions using priors to undo + the encoding we did for offset regression at train time. + Args: + pre (tensor): landm predictions for loc layers, + Shape: [num_batches,num_priors,10] + priors (tensor): Prior boxes in center-offset form. + Shape: [1,num_priors,4]. 
+ variances: (list[float]) Variances of priorboxes + Return: + decoded landm predictions + """ + landms = ( + priors[:, :, :2] + pre[:, :, :2] * variances[0] * priors[:, :, 2:], + priors[:, :, :2] + pre[:, :, 2:4] * variances[0] * priors[:, :, 2:], + priors[:, :, :2] + pre[:, :, 4:6] * variances[0] * priors[:, :, 2:], + priors[:, :, :2] + pre[:, :, 6:8] * variances[0] * priors[:, :, 2:], + priors[:, :, :2] + pre[:, :, 8:10] * variances[0] * priors[:, :, 2:], + ) + landms = torch.cat(landms, dim=2) + return landms + + +def log_sum_exp(x): + """Utility function for computing log_sum_exp in a numerically stable way. + This will be used to determine unaveraged confidence loss across + all examples in a batch. + Args: + x (Variable(tensor)): conf_preds from conf layers + """ + x_max = x.data.max() + return torch.log(torch.sum(torch.exp(x - x_max), 1, keepdim=True)) + x_max + + +# Original author: Francisco Massa: +# https://github.com/fmassa/object-detection.torch +# Ported to PyTorch by Max deGroot (02/01/2017) +def nms(boxes, scores, overlap=0.5, top_k=200): + """Apply non-maximum suppression at test time to avoid detecting too many + overlapping bounding boxes for a given object. + Args: + boxes: (tensor) The location preds for the img, Shape: [num_priors,4]. + scores: (tensor) The class prediction scores for the img, Shape:[num_priors]. + overlap: (float) The overlap thresh for suppressing unnecessary boxes. + top_k: (int) The maximum number of box preds to consider. + Return: + The indices of the kept boxes with respect to num_priors. + """ + + keep = torch.Tensor(scores.size(0)).fill_(0).long() + if boxes.numel() == 0: + return keep + x1 = boxes[:, 0] + y1 = boxes[:, 1] + x2 = boxes[:, 2] + y2 = boxes[:, 3] + area = torch.mul(x2 - x1, y2 - y1) + v, idx = scores.sort(0) # sort in ascending order + # I = I[v >= 0.01] + idx = idx[-top_k:] # indices of the top-k largest vals + xx1 = boxes.new() + yy1 = boxes.new() + xx2 = boxes.new() + yy2 = boxes.new() + w = boxes.new() + h = boxes.new() + + # keep = torch.Tensor() + count = 0 + while idx.numel() > 0: + i = idx[-1] # index of current largest val + # keep.append(i) + keep[count] = i + count += 1 + if idx.size(0) == 1: + break + idx = idx[:-1] # remove kept element from view + # load bboxes of next highest vals + torch.index_select(x1, 0, idx, out=xx1) + torch.index_select(y1, 0, idx, out=yy1) + torch.index_select(x2, 0, idx, out=xx2) + torch.index_select(y2, 0, idx, out=yy2) + # store element-wise max with next highest score + xx1 = torch.clamp(xx1, min=x1[i]) + yy1 = torch.clamp(yy1, min=y1[i]) + xx2 = torch.clamp(xx2, max=x2[i]) + yy2 = torch.clamp(yy2, max=y2[i]) + w.resize_as_(xx2) + h.resize_as_(yy2) + w = xx2 - xx1 + h = yy2 - yy1 + # check sizes of xx1 and xx2..
after each iteration + w = torch.clamp(w, min=0.0) + h = torch.clamp(h, min=0.0) + inter = w * h + # IoU = i / (area(a) + area(b) - i) + rem_areas = torch.index_select(area, 0, idx) # load remaining areas) + union = (rem_areas - inter) + area[i] + IoU = inter / union # store result in iou + # keep only elements with an IoU <= overlap + idx = idx[IoU.le(overlap)] + return keep, count diff --git a/extras/facexlib/parsing/__init__.py b/extras/facexlib/parsing/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8b4758bdda6e6f1528b943a37e32d901a32fa569 --- /dev/null +++ b/extras/facexlib/parsing/__init__.py @@ -0,0 +1,24 @@ +import torch + +from extras.facexlib.utils import load_file_from_url +from .bisenet import BiSeNet +from .parsenet import ParseNet + + +def init_parsing_model(model_name='bisenet', half=False, device='cuda', model_rootpath=None): + if model_name == 'bisenet': + model = BiSeNet(num_class=19) + model_url = 'https://github.com/xinntao/facexlib/releases/download/v0.2.0/parsing_bisenet.pth' + elif model_name == 'parsenet': + model = ParseNet(in_size=512, out_size=512, parsing_ch=19) + model_url = 'https://github.com/xinntao/facexlib/releases/download/v0.2.2/parsing_parsenet.pth' + else: + raise NotImplementedError(f'{model_name} is not implemented.') + + model_path = load_file_from_url( + url=model_url, model_dir='facexlib/weights', progress=True, file_name=None, save_dir=model_rootpath) + load_net = torch.load(model_path, map_location=lambda storage, loc: storage) + model.load_state_dict(load_net, strict=True) + model.eval() + model = model.to(device) + return model diff --git a/extras/facexlib/parsing/__pycache__/__init__.cpython-310.pyc b/extras/facexlib/parsing/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..19d1e41e379d2447f17cb2ef1cb58faa776f7f6f Binary files /dev/null and b/extras/facexlib/parsing/__pycache__/__init__.cpython-310.pyc differ diff --git a/extras/facexlib/parsing/__pycache__/bisenet.cpython-310.pyc b/extras/facexlib/parsing/__pycache__/bisenet.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..af2418fa183096e9db4bcf0a04d0e60ac949be63 Binary files /dev/null and b/extras/facexlib/parsing/__pycache__/bisenet.cpython-310.pyc differ diff --git a/extras/facexlib/parsing/__pycache__/parsenet.cpython-310.pyc b/extras/facexlib/parsing/__pycache__/parsenet.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bc6637cbf5e1168f5ed09120df385c67a0e67d53 Binary files /dev/null and b/extras/facexlib/parsing/__pycache__/parsenet.cpython-310.pyc differ diff --git a/extras/facexlib/parsing/__pycache__/resnet.cpython-310.pyc b/extras/facexlib/parsing/__pycache__/resnet.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cee6ba39596c9973b7288648d3cebb4485047afb Binary files /dev/null and b/extras/facexlib/parsing/__pycache__/resnet.cpython-310.pyc differ diff --git a/extras/facexlib/parsing/bisenet.py b/extras/facexlib/parsing/bisenet.py new file mode 100644 index 0000000000000000000000000000000000000000..3898cab76ae5876459cd4899c54cafa14234971d --- /dev/null +++ b/extras/facexlib/parsing/bisenet.py @@ -0,0 +1,140 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from .resnet import ResNet18 + + +class ConvBNReLU(nn.Module): + + def __init__(self, in_chan, out_chan, ks=3, stride=1, padding=1): + super(ConvBNReLU, self).__init__() + self.conv = nn.Conv2d(in_chan, out_chan, 
kernel_size=ks, stride=stride, padding=padding, bias=False) + self.bn = nn.BatchNorm2d(out_chan) + + def forward(self, x): + x = self.conv(x) + x = F.relu(self.bn(x)) + return x + + +class BiSeNetOutput(nn.Module): + + def __init__(self, in_chan, mid_chan, num_class): + super(BiSeNetOutput, self).__init__() + self.conv = ConvBNReLU(in_chan, mid_chan, ks=3, stride=1, padding=1) + self.conv_out = nn.Conv2d(mid_chan, num_class, kernel_size=1, bias=False) + + def forward(self, x): + feat = self.conv(x) + out = self.conv_out(feat) + return out, feat + + +class AttentionRefinementModule(nn.Module): + + def __init__(self, in_chan, out_chan): + super(AttentionRefinementModule, self).__init__() + self.conv = ConvBNReLU(in_chan, out_chan, ks=3, stride=1, padding=1) + self.conv_atten = nn.Conv2d(out_chan, out_chan, kernel_size=1, bias=False) + self.bn_atten = nn.BatchNorm2d(out_chan) + self.sigmoid_atten = nn.Sigmoid() + + def forward(self, x): + feat = self.conv(x) + atten = F.avg_pool2d(feat, feat.size()[2:]) + atten = self.conv_atten(atten) + atten = self.bn_atten(atten) + atten = self.sigmoid_atten(atten) + out = torch.mul(feat, atten) + return out + + +class ContextPath(nn.Module): + + def __init__(self): + super(ContextPath, self).__init__() + self.resnet = ResNet18() + self.arm16 = AttentionRefinementModule(256, 128) + self.arm32 = AttentionRefinementModule(512, 128) + self.conv_head32 = ConvBNReLU(128, 128, ks=3, stride=1, padding=1) + self.conv_head16 = ConvBNReLU(128, 128, ks=3, stride=1, padding=1) + self.conv_avg = ConvBNReLU(512, 128, ks=1, stride=1, padding=0) + + def forward(self, x): + feat8, feat16, feat32 = self.resnet(x) + h8, w8 = feat8.size()[2:] + h16, w16 = feat16.size()[2:] + h32, w32 = feat32.size()[2:] + + avg = F.avg_pool2d(feat32, feat32.size()[2:]) + avg = self.conv_avg(avg) + avg_up = F.interpolate(avg, (h32, w32), mode='nearest') + + feat32_arm = self.arm32(feat32) + feat32_sum = feat32_arm + avg_up + feat32_up = F.interpolate(feat32_sum, (h16, w16), mode='nearest') + feat32_up = self.conv_head32(feat32_up) + + feat16_arm = self.arm16(feat16) + feat16_sum = feat16_arm + feat32_up + feat16_up = F.interpolate(feat16_sum, (h8, w8), mode='nearest') + feat16_up = self.conv_head16(feat16_up) + + return feat8, feat16_up, feat32_up # x8, x8, x16 + + +class FeatureFusionModule(nn.Module): + + def __init__(self, in_chan, out_chan): + super(FeatureFusionModule, self).__init__() + self.convblk = ConvBNReLU(in_chan, out_chan, ks=1, stride=1, padding=0) + self.conv1 = nn.Conv2d(out_chan, out_chan // 4, kernel_size=1, stride=1, padding=0, bias=False) + self.conv2 = nn.Conv2d(out_chan // 4, out_chan, kernel_size=1, stride=1, padding=0, bias=False) + self.relu = nn.ReLU(inplace=True) + self.sigmoid = nn.Sigmoid() + + def forward(self, fsp, fcp): + fcat = torch.cat([fsp, fcp], dim=1) + feat = self.convblk(fcat) + atten = F.avg_pool2d(feat, feat.size()[2:]) + atten = self.conv1(atten) + atten = self.relu(atten) + atten = self.conv2(atten) + atten = self.sigmoid(atten) + feat_atten = torch.mul(feat, atten) + feat_out = feat_atten + feat + return feat_out + + +class BiSeNet(nn.Module): + + def __init__(self, num_class): + super(BiSeNet, self).__init__() + self.cp = ContextPath() + self.ffm = FeatureFusionModule(256, 256) + self.conv_out = BiSeNetOutput(256, 256, num_class) + self.conv_out16 = BiSeNetOutput(128, 64, num_class) + self.conv_out32 = BiSeNetOutput(128, 64, num_class) + + def forward(self, x, return_feat=False): + h, w = x.size()[2:] + feat_res8, feat_cp8, feat_cp16 = self.cp(x) 
# return res3b1 feature + feat_sp = feat_res8 # replace spatial path feature with res3b1 feature + feat_fuse = self.ffm(feat_sp, feat_cp8) + + out, feat = self.conv_out(feat_fuse) + out16, feat16 = self.conv_out16(feat_cp8) + out32, feat32 = self.conv_out32(feat_cp16) + + out = F.interpolate(out, (h, w), mode='bilinear', align_corners=True) + out16 = F.interpolate(out16, (h, w), mode='bilinear', align_corners=True) + out32 = F.interpolate(out32, (h, w), mode='bilinear', align_corners=True) + + if return_feat: + feat = F.interpolate(feat, (h, w), mode='bilinear', align_corners=True) + feat16 = F.interpolate(feat16, (h, w), mode='bilinear', align_corners=True) + feat32 = F.interpolate(feat32, (h, w), mode='bilinear', align_corners=True) + return out, out16, out32, feat, feat16, feat32 + else: + return out, out16, out32 diff --git a/extras/facexlib/parsing/parsenet.py b/extras/facexlib/parsing/parsenet.py new file mode 100644 index 0000000000000000000000000000000000000000..e178ebe43a1ef666aaea0bc0faf629485c22a24f --- /dev/null +++ b/extras/facexlib/parsing/parsenet.py @@ -0,0 +1,194 @@ +"""Modified from https://github.com/chaofengc/PSFRGAN +""" +import numpy as np +import torch.nn as nn +from torch.nn import functional as F + + +class NormLayer(nn.Module): + """Normalization Layers. + + Args: + channels: input channels, for batch norm and instance norm. + input_size: input shape without batch size, for layer norm. + """ + + def __init__(self, channels, normalize_shape=None, norm_type='bn'): + super(NormLayer, self).__init__() + norm_type = norm_type.lower() + self.norm_type = norm_type + if norm_type == 'bn': + self.norm = nn.BatchNorm2d(channels, affine=True) + elif norm_type == 'in': + self.norm = nn.InstanceNorm2d(channels, affine=False) + elif norm_type == 'gn': + self.norm = nn.GroupNorm(32, channels, affine=True) + elif norm_type == 'pixel': + self.norm = lambda x: F.normalize(x, p=2, dim=1) + elif norm_type == 'layer': + self.norm = nn.LayerNorm(normalize_shape) + elif norm_type == 'none': + self.norm = lambda x: x * 1.0 + else: + assert 1 == 0, f'Norm type {norm_type} not support.' + + def forward(self, x, ref=None): + if self.norm_type == 'spade': + return self.norm(x, ref) + else: + return self.norm(x) + + +class ReluLayer(nn.Module): + """Relu Layer. + + Args: + relu type: type of relu layer, candidates are + - ReLU + - LeakyReLU: default relu slope 0.2 + - PRelu + - SELU + - none: direct pass + """ + + def __init__(self, channels, relu_type='relu'): + super(ReluLayer, self).__init__() + relu_type = relu_type.lower() + if relu_type == 'relu': + self.func = nn.ReLU(True) + elif relu_type == 'leakyrelu': + self.func = nn.LeakyReLU(0.2, inplace=True) + elif relu_type == 'prelu': + self.func = nn.PReLU(channels) + elif relu_type == 'selu': + self.func = nn.SELU(True) + elif relu_type == 'none': + self.func = lambda x: x * 1.0 + else: + assert 1 == 0, f'Relu type {relu_type} not support.' 
+ + def forward(self, x): + return self.func(x) + + +class ConvLayer(nn.Module): + + def __init__(self, + in_channels, + out_channels, + kernel_size=3, + scale='none', + norm_type='none', + relu_type='none', + use_pad=True, + bias=True): + super(ConvLayer, self).__init__() + self.use_pad = use_pad + self.norm_type = norm_type + if norm_type in ['bn']: + bias = False + + stride = 2 if scale == 'down' else 1 + + self.scale_func = lambda x: x + if scale == 'up': + self.scale_func = lambda x: nn.functional.interpolate(x, scale_factor=2, mode='nearest') + + self.reflection_pad = nn.ReflectionPad2d(int(np.ceil((kernel_size - 1.) / 2))) + self.conv2d = nn.Conv2d(in_channels, out_channels, kernel_size, stride, bias=bias) + + self.relu = ReluLayer(out_channels, relu_type) + self.norm = NormLayer(out_channels, norm_type=norm_type) + + def forward(self, x): + out = self.scale_func(x) + if self.use_pad: + out = self.reflection_pad(out) + out = self.conv2d(out) + out = self.norm(out) + out = self.relu(out) + return out + + +class ResidualBlock(nn.Module): + """ + Residual block recommended in: http://torch.ch/blog/2016/02/04/resnets.html + """ + + def __init__(self, c_in, c_out, relu_type='prelu', norm_type='bn', scale='none'): + super(ResidualBlock, self).__init__() + + if scale == 'none' and c_in == c_out: + self.shortcut_func = lambda x: x + else: + self.shortcut_func = ConvLayer(c_in, c_out, 3, scale) + + scale_config_dict = {'down': ['none', 'down'], 'up': ['up', 'none'], 'none': ['none', 'none']} + scale_conf = scale_config_dict[scale] + + self.conv1 = ConvLayer(c_in, c_out, 3, scale_conf[0], norm_type=norm_type, relu_type=relu_type) + self.conv2 = ConvLayer(c_out, c_out, 3, scale_conf[1], norm_type=norm_type, relu_type='none') + + def forward(self, x): + identity = self.shortcut_func(x) + + res = self.conv1(x) + res = self.conv2(res) + return identity + res + + +class ParseNet(nn.Module): + + def __init__(self, + in_size=128, + out_size=128, + min_feat_size=32, + base_ch=64, + parsing_ch=19, + res_depth=10, + relu_type='LeakyReLU', + norm_type='bn', + ch_range=[32, 256]): + super().__init__() + self.res_depth = res_depth + act_args = {'norm_type': norm_type, 'relu_type': relu_type} + min_ch, max_ch = ch_range + + ch_clip = lambda x: max(min_ch, min(x, max_ch)) # noqa: E731 + min_feat_size = min(in_size, min_feat_size) + + down_steps = int(np.log2(in_size // min_feat_size)) + up_steps = int(np.log2(out_size // min_feat_size)) + + # =============== define encoder-body-decoder ==================== + self.encoder = [] + self.encoder.append(ConvLayer(3, base_ch, 3, 1)) + head_ch = base_ch + for i in range(down_steps): + cin, cout = ch_clip(head_ch), ch_clip(head_ch * 2) + self.encoder.append(ResidualBlock(cin, cout, scale='down', **act_args)) + head_ch = head_ch * 2 + + self.body = [] + for i in range(res_depth): + self.body.append(ResidualBlock(ch_clip(head_ch), ch_clip(head_ch), **act_args)) + + self.decoder = [] + for i in range(up_steps): + cin, cout = ch_clip(head_ch), ch_clip(head_ch // 2) + self.decoder.append(ResidualBlock(cin, cout, scale='up', **act_args)) + head_ch = head_ch // 2 + + self.encoder = nn.Sequential(*self.encoder) + self.body = nn.Sequential(*self.body) + self.decoder = nn.Sequential(*self.decoder) + self.out_img_conv = ConvLayer(ch_clip(head_ch), 3) + self.out_mask_conv = ConvLayer(ch_clip(head_ch), parsing_ch) + + def forward(self, x): + feat = self.encoder(x) + x = feat + self.body(feat) + x = self.decoder(x) + out_img = self.out_img_conv(x) + out_mask = 
self.out_mask_conv(x) + return out_mask, out_img diff --git a/extras/facexlib/parsing/resnet.py b/extras/facexlib/parsing/resnet.py new file mode 100644 index 0000000000000000000000000000000000000000..fec8e82cf64469fb51be21ad5130217052addbda --- /dev/null +++ b/extras/facexlib/parsing/resnet.py @@ -0,0 +1,69 @@ +import torch.nn as nn +import torch.nn.functional as F + + +def conv3x3(in_planes, out_planes, stride=1): + """3x3 convolution with padding""" + return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False) + + +class BasicBlock(nn.Module): + + def __init__(self, in_chan, out_chan, stride=1): + super(BasicBlock, self).__init__() + self.conv1 = conv3x3(in_chan, out_chan, stride) + self.bn1 = nn.BatchNorm2d(out_chan) + self.conv2 = conv3x3(out_chan, out_chan) + self.bn2 = nn.BatchNorm2d(out_chan) + self.relu = nn.ReLU(inplace=True) + self.downsample = None + if in_chan != out_chan or stride != 1: + self.downsample = nn.Sequential( + nn.Conv2d(in_chan, out_chan, kernel_size=1, stride=stride, bias=False), + nn.BatchNorm2d(out_chan), + ) + + def forward(self, x): + residual = self.conv1(x) + residual = F.relu(self.bn1(residual)) + residual = self.conv2(residual) + residual = self.bn2(residual) + + shortcut = x + if self.downsample is not None: + shortcut = self.downsample(x) + + out = shortcut + residual + out = self.relu(out) + return out + + +def create_layer_basic(in_chan, out_chan, bnum, stride=1): + layers = [BasicBlock(in_chan, out_chan, stride=stride)] + for i in range(bnum - 1): + layers.append(BasicBlock(out_chan, out_chan, stride=1)) + return nn.Sequential(*layers) + + +class ResNet18(nn.Module): + + def __init__(self): + super(ResNet18, self).__init__() + self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False) + self.bn1 = nn.BatchNorm2d(64) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + self.layer1 = create_layer_basic(64, 64, bnum=2, stride=1) + self.layer2 = create_layer_basic(64, 128, bnum=2, stride=2) + self.layer3 = create_layer_basic(128, 256, bnum=2, stride=2) + self.layer4 = create_layer_basic(256, 512, bnum=2, stride=2) + + def forward(self, x): + x = self.conv1(x) + x = F.relu(self.bn1(x)) + x = self.maxpool(x) + + x = self.layer1(x) + feat8 = self.layer2(x) # 1/8 + feat16 = self.layer3(feat8) # 1/16 + feat32 = self.layer4(feat16) # 1/32 + return feat8, feat16, feat32 diff --git a/extras/facexlib/utils/__init__.py b/extras/facexlib/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..706e077a4f6d32ad8e8dd839f19f955a97844e01 --- /dev/null +++ b/extras/facexlib/utils/__init__.py @@ -0,0 +1,7 @@ +from .face_utils import align_crop_face_landmarks, compute_increased_bbox, get_valid_bboxes, paste_face_back +from .misc import img2tensor, load_file_from_url, scandir + +__all__ = [ + 'align_crop_face_landmarks', 'compute_increased_bbox', 'get_valid_bboxes', 'load_file_from_url', 'paste_face_back', + 'img2tensor', 'scandir' +] diff --git a/extras/facexlib/utils/__pycache__/__init__.cpython-310.pyc b/extras/facexlib/utils/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ddd484d8d03aeb28a82ceae46f400039ad1b3d61 Binary files /dev/null and b/extras/facexlib/utils/__pycache__/__init__.cpython-310.pyc differ diff --git a/extras/facexlib/utils/__pycache__/face_restoration_helper.cpython-310.pyc b/extras/facexlib/utils/__pycache__/face_restoration_helper.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..b3356655014ce90b919538e9ba1780c1d3049181 Binary files /dev/null and b/extras/facexlib/utils/__pycache__/face_restoration_helper.cpython-310.pyc differ diff --git a/extras/facexlib/utils/__pycache__/face_utils.cpython-310.pyc b/extras/facexlib/utils/__pycache__/face_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..134a48521adb087f6f5c2a9e6e9548538d8697cc Binary files /dev/null and b/extras/facexlib/utils/__pycache__/face_utils.cpython-310.pyc differ diff --git a/extras/facexlib/utils/__pycache__/misc.cpython-310.pyc b/extras/facexlib/utils/__pycache__/misc.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f5c256678abd9ffa87a70162bf1d3e0b41377e5e Binary files /dev/null and b/extras/facexlib/utils/__pycache__/misc.cpython-310.pyc differ diff --git a/extras/facexlib/utils/face_restoration_helper.py b/extras/facexlib/utils/face_restoration_helper.py new file mode 100644 index 0000000000000000000000000000000000000000..2a39361aae2d62cdb752f8f7ba3e0e524400ad97 --- /dev/null +++ b/extras/facexlib/utils/face_restoration_helper.py @@ -0,0 +1,374 @@ +import cv2 +import numpy as np +import os +import torch +from torchvision.transforms.functional import normalize + +from extras.facexlib.detection import init_detection_model +from extras.facexlib.parsing import init_parsing_model +from extras.facexlib.utils.misc import img2tensor, imwrite + + +def get_largest_face(det_faces, h, w): + + def get_location(val, length): + if val < 0: + return 0 + elif val > length: + return length + else: + return val + + face_areas = [] + for det_face in det_faces: + left = get_location(det_face[0], w) + right = get_location(det_face[2], w) + top = get_location(det_face[1], h) + bottom = get_location(det_face[3], h) + face_area = (right - left) * (bottom - top) + face_areas.append(face_area) + largest_idx = face_areas.index(max(face_areas)) + return det_faces[largest_idx], largest_idx + + +def get_center_face(det_faces, h=0, w=0, center=None): + if center is not None: + center = np.array(center) + else: + center = np.array([w / 2, h / 2]) + center_dist = [] + for det_face in det_faces: + face_center = np.array([(det_face[0] + det_face[2]) / 2, (det_face[1] + det_face[3]) / 2]) + dist = np.linalg.norm(face_center - center) + center_dist.append(dist) + center_idx = center_dist.index(min(center_dist)) + return det_faces[center_idx], center_idx + + +class FaceRestoreHelper(object): + """Helper for the face restoration pipeline (base class).""" + + def __init__(self, + upscale_factor, + face_size=512, + crop_ratio=(1, 1), + det_model='retinaface_resnet50', + save_ext='png', + template_3points=False, + pad_blur=False, + use_parse=False, + device=None, + model_rootpath=None): + self.template_3points = template_3points # improve robustness + self.upscale_factor = upscale_factor + # the cropped face ratio based on the square face + self.crop_ratio = crop_ratio # (h, w) + assert (self.crop_ratio[0] >= 1 and self.crop_ratio[1] >= 1), 'crop ration only supports >=1' + self.face_size = (int(face_size * self.crop_ratio[1]), int(face_size * self.crop_ratio[0])) + + if self.template_3points: + self.face_template = np.array([[192, 240], [319, 240], [257, 371]]) + else: + # standard 5 landmarks for FFHQ faces with 512 x 512 + self.face_template = np.array([[192.98138, 239.94708], [318.90277, 240.1936], [256.63416, 314.01935], + [201.26117, 371.41043], [313.08905, 371.15118]]) + self.face_template = self.face_template * 
(face_size / 512.0) + if self.crop_ratio[0] > 1: + self.face_template[:, 1] += face_size * (self.crop_ratio[0] - 1) / 2 + if self.crop_ratio[1] > 1: + self.face_template[:, 0] += face_size * (self.crop_ratio[1] - 1) / 2 + self.save_ext = save_ext + self.pad_blur = pad_blur + if self.pad_blur is True: + self.template_3points = False + + self.all_landmarks_5 = [] + self.det_faces = [] + self.affine_matrices = [] + self.inverse_affine_matrices = [] + self.cropped_faces = [] + self.restored_faces = [] + self.pad_input_imgs = [] + + if device is None: + self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') + else: + self.device = device + + # init face detection model + self.face_det = init_detection_model(det_model, half=False, device=self.device, model_rootpath=model_rootpath) + + # init face parsing model + self.use_parse = use_parse + self.face_parse = init_parsing_model(model_name='parsenet', device=self.device, model_rootpath=model_rootpath) + + def set_upscale_factor(self, upscale_factor): + self.upscale_factor = upscale_factor + + def read_image(self, img): + """img can be image path or cv2 loaded image.""" + # self.input_img is Numpy array, (h, w, c), BGR, uint8, [0, 255] + if isinstance(img, str): + img = cv2.imread(img) + + if np.max(img) > 256: # 16-bit image + img = img / 65535 * 255 + if len(img.shape) == 2: # gray image + img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR) + elif img.shape[2] == 4: # RGBA image with alpha channel + img = img[:, :, 0:3] + + self.input_img = img + + def get_face_landmarks_5(self, + only_keep_largest=False, + only_center_face=False, + resize=None, + blur_ratio=0.01, + eye_dist_threshold=None): + if resize is None: + scale = 1 + input_img = self.input_img + else: + h, w = self.input_img.shape[0:2] + scale = min(h, w) / resize + h, w = int(h / scale), int(w / scale) + input_img = cv2.resize(self.input_img, (w, h), interpolation=cv2.INTER_LANCZOS4) + + with torch.no_grad(): + bboxes = self.face_det.detect_faces(input_img, 0.97) * scale + for bbox in bboxes: + # remove faces with too small eye distance: side faces or too small faces + eye_dist = np.linalg.norm([bbox[5] - bbox[7], bbox[6] - bbox[8]]) + if eye_dist_threshold is not None and (eye_dist < eye_dist_threshold): + continue + + if self.template_3points: + landmark = np.array([[bbox[i], bbox[i + 1]] for i in range(5, 11, 2)]) + else: + landmark = np.array([[bbox[i], bbox[i + 1]] for i in range(5, 15, 2)]) + self.all_landmarks_5.append(landmark) + self.det_faces.append(bbox[0:5]) + if len(self.det_faces) == 0: + return 0 + if only_keep_largest: + h, w, _ = self.input_img.shape + self.det_faces, largest_idx = get_largest_face(self.det_faces, h, w) + self.all_landmarks_5 = [self.all_landmarks_5[largest_idx]] + elif only_center_face: + h, w, _ = self.input_img.shape + self.det_faces, center_idx = get_center_face(self.det_faces, h, w) + self.all_landmarks_5 = [self.all_landmarks_5[center_idx]] + + # pad blurry images + if self.pad_blur: + self.pad_input_imgs = [] + for landmarks in self.all_landmarks_5: + # get landmarks + eye_left = landmarks[0, :] + eye_right = landmarks[1, :] + eye_avg = (eye_left + eye_right) * 0.5 + mouth_avg = (landmarks[3, :] + landmarks[4, :]) * 0.5 + eye_to_eye = eye_right - eye_left + eye_to_mouth = mouth_avg - eye_avg + + # Get the oriented crop rectangle + # x: half width of the oriented crop rectangle + x = eye_to_eye - np.flipud(eye_to_mouth) * [-1, 1] + # - np.flipud(eye_to_mouth) * [-1, 1]: rotate 90 clockwise + # norm with the hypotenuse: get the 
direction + x /= np.hypot(*x) # get the hypotenuse of a right triangle + rect_scale = 1.5 + x *= max(np.hypot(*eye_to_eye) * 2.0 * rect_scale, np.hypot(*eye_to_mouth) * 1.8 * rect_scale) + # y: half height of the oriented crop rectangle + y = np.flipud(x) * [-1, 1] + + # c: center + c = eye_avg + eye_to_mouth * 0.1 + # quad: (left_top, left_bottom, right_bottom, right_top) + quad = np.stack([c - x - y, c - x + y, c + x + y, c + x - y]) + # qsize: side length of the square + qsize = np.hypot(*x) * 2 + border = max(int(np.rint(qsize * 0.1)), 3) + + # get pad + # pad: (width_left, height_top, width_right, height_bottom) + pad = (int(np.floor(min(quad[:, 0]))), int(np.floor(min(quad[:, 1]))), int(np.ceil(max(quad[:, 0]))), + int(np.ceil(max(quad[:, 1])))) + pad = [ + max(-pad[0] + border, 1), + max(-pad[1] + border, 1), + max(pad[2] - self.input_img.shape[0] + border, 1), + max(pad[3] - self.input_img.shape[1] + border, 1) + ] + + if max(pad) > 1: + # pad image + pad_img = np.pad(self.input_img, ((pad[1], pad[3]), (pad[0], pad[2]), (0, 0)), 'reflect') + # modify landmark coords + landmarks[:, 0] += pad[0] + landmarks[:, 1] += pad[1] + # blur pad images + h, w, _ = pad_img.shape + y, x, _ = np.ogrid[:h, :w, :1] + mask = np.maximum(1.0 - np.minimum(np.float32(x) / pad[0], + np.float32(w - 1 - x) / pad[2]), + 1.0 - np.minimum(np.float32(y) / pad[1], + np.float32(h - 1 - y) / pad[3])) + blur = int(qsize * blur_ratio) + if blur % 2 == 0: + blur += 1 + blur_img = cv2.boxFilter(pad_img, 0, ksize=(blur, blur)) + # blur_img = cv2.GaussianBlur(pad_img, (blur, blur), 0) + + pad_img = pad_img.astype('float32') + pad_img += (blur_img - pad_img) * np.clip(mask * 3.0 + 1.0, 0.0, 1.0) + pad_img += (np.median(pad_img, axis=(0, 1)) - pad_img) * np.clip(mask, 0.0, 1.0) + pad_img = np.clip(pad_img, 0, 255) # float32, [0, 255] + self.pad_input_imgs.append(pad_img) + else: + self.pad_input_imgs.append(np.copy(self.input_img)) + + return len(self.all_landmarks_5) + + def align_warp_face(self, save_cropped_path=None, border_mode='constant'): + """Align and warp faces with face template. 
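+ + Each face is mapped onto self.face_template with a similarity transform + estimated from its 5 landmarks via cv2.estimateAffinePartial2D (LMEDS).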
+ """ + if self.pad_blur: + assert len(self.pad_input_imgs) == len( + self.all_landmarks_5), f'Mismatched samples: {len(self.pad_input_imgs)} and {len(self.all_landmarks_5)}' + for idx, landmark in enumerate(self.all_landmarks_5): + # use 5 landmarks to get affine matrix + # use cv2.LMEDS method for the equivalence to skimage transform + # ref: https://blog.csdn.net/yichxi/article/details/115827338 + affine_matrix = cv2.estimateAffinePartial2D(landmark, self.face_template, method=cv2.LMEDS)[0] + self.affine_matrices.append(affine_matrix) + # warp and crop faces + if border_mode == 'constant': + border_mode = cv2.BORDER_CONSTANT + elif border_mode == 'reflect101': + border_mode = cv2.BORDER_REFLECT101 + elif border_mode == 'reflect': + border_mode = cv2.BORDER_REFLECT + if self.pad_blur: + input_img = self.pad_input_imgs[idx] + else: + input_img = self.input_img + cropped_face = cv2.warpAffine( + input_img, affine_matrix, self.face_size, borderMode=border_mode, borderValue=(135, 133, 132)) # gray + self.cropped_faces.append(cropped_face) + # save the cropped face + if save_cropped_path is not None: + path = os.path.splitext(save_cropped_path)[0] + save_path = f'{path}_{idx:02d}.{self.save_ext}' + imwrite(cropped_face, save_path) + + def get_inverse_affine(self, save_inverse_affine_path=None): + """Get inverse affine matrix.""" + for idx, affine_matrix in enumerate(self.affine_matrices): + inverse_affine = cv2.invertAffineTransform(affine_matrix) + inverse_affine *= self.upscale_factor + self.inverse_affine_matrices.append(inverse_affine) + # save inverse affine matrices + if save_inverse_affine_path is not None: + path, _ = os.path.splitext(save_inverse_affine_path) + save_path = f'{path}_{idx:02d}.pth' + torch.save(inverse_affine, save_path) + + def add_restored_face(self, face): + self.restored_faces.append(face) + + def paste_faces_to_input_image(self, save_path=None, upsample_img=None): + h, w, _ = self.input_img.shape + h_up, w_up = int(h * self.upscale_factor), int(w * self.upscale_factor) + + if upsample_img is None: + # simply resize the background + upsample_img = cv2.resize(self.input_img, (w_up, h_up), interpolation=cv2.INTER_LANCZOS4) + else: + upsample_img = cv2.resize(upsample_img, (w_up, h_up), interpolation=cv2.INTER_LANCZOS4) + + assert len(self.restored_faces) == len( + self.inverse_affine_matrices), ('length of restored_faces and affine_matrices are different.') + for restored_face, inverse_affine in zip(self.restored_faces, self.inverse_affine_matrices): + # Add an offset to inverse affine matrix, for more precise back alignment + if self.upscale_factor > 1: + extra_offset = 0.5 * self.upscale_factor + else: + extra_offset = 0 + inverse_affine[:, 2] += extra_offset + inv_restored = cv2.warpAffine(restored_face, inverse_affine, (w_up, h_up)) + + if self.use_parse: + # inference + face_input = cv2.resize(restored_face, (512, 512), interpolation=cv2.INTER_LINEAR) + face_input = img2tensor(face_input.astype('float32') / 255., bgr2rgb=True, float32=True) + normalize(face_input, (0.5, 0.5, 0.5), (0.5, 0.5, 0.5), inplace=True) + face_input = torch.unsqueeze(face_input, 0).to(self.device) + with torch.no_grad(): + out = self.face_parse(face_input)[0] + out = out.argmax(dim=1).squeeze().cpu().numpy() + + mask = np.zeros(out.shape) + MASK_COLORMAP = [0, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 0, 255, 0, 0, 0] + for idx, color in enumerate(MASK_COLORMAP): + mask[out == idx] = color + # blur the mask + mask = cv2.GaussianBlur(mask, (101, 101), 11) + mask = 
cv2.GaussianBlur(mask, (101, 101), 11) + # remove the black borders + thres = 10 + mask[:thres, :] = 0 + mask[-thres:, :] = 0 + mask[:, :thres] = 0 + mask[:, -thres:] = 0 + mask = mask / 255. + + mask = cv2.resize(mask, restored_face.shape[:2]) + mask = cv2.warpAffine(mask, inverse_affine, (w_up, h_up), flags=3) + inv_soft_mask = mask[:, :, None] + pasted_face = inv_restored + + else: # use square parse maps + mask = np.ones(self.face_size, dtype=np.float32) + inv_mask = cv2.warpAffine(mask, inverse_affine, (w_up, h_up)) + # remove the black borders + inv_mask_erosion = cv2.erode( + inv_mask, np.ones((int(2 * self.upscale_factor), int(2 * self.upscale_factor)), np.uint8)) + pasted_face = inv_mask_erosion[:, :, None] * inv_restored + total_face_area = np.sum(inv_mask_erosion) # // 3 + # compute the fusion edge based on the area of face + w_edge = int(total_face_area**0.5) // 20 + erosion_radius = w_edge * 2 + inv_mask_center = cv2.erode(inv_mask_erosion, np.ones((erosion_radius, erosion_radius), np.uint8)) + blur_size = w_edge * 2 + inv_soft_mask = cv2.GaussianBlur(inv_mask_center, (blur_size + 1, blur_size + 1), 0) + if len(upsample_img.shape) == 2: # upsample_img is gray image + upsample_img = upsample_img[:, :, None] + inv_soft_mask = inv_soft_mask[:, :, None] + + if len(upsample_img.shape) == 3 and upsample_img.shape[2] == 4: # alpha channel + alpha = upsample_img[:, :, 3:] + upsample_img = inv_soft_mask * pasted_face + (1 - inv_soft_mask) * upsample_img[:, :, 0:3] + upsample_img = np.concatenate((upsample_img, alpha), axis=2) + else: + upsample_img = inv_soft_mask * pasted_face + (1 - inv_soft_mask) * upsample_img + + if np.max(upsample_img) > 256: # 16-bit image + upsample_img = upsample_img.astype(np.uint16) + else: + upsample_img = upsample_img.astype(np.uint8) + if save_path is not None: + path = os.path.splitext(save_path)[0] + save_path = f'{path}.{self.save_ext}' + imwrite(upsample_img, save_path) + return upsample_img + + def clean_all(self): + self.all_landmarks_5 = [] + self.restored_faces = [] + self.affine_matrices = [] + self.cropped_faces = [] + self.inverse_affine_matrices = [] + self.det_faces = [] + self.pad_input_imgs = [] diff --git a/extras/facexlib/utils/face_utils.py b/extras/facexlib/utils/face_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..0bbe43c81110b82daaef09e6277d9c11f62e4bb4 --- /dev/null +++ b/extras/facexlib/utils/face_utils.py @@ -0,0 +1,250 @@ +import cv2 +import numpy as np +import torch + + +def compute_increased_bbox(bbox, increase_area, preserve_aspect=True): + left, top, right, bot = bbox + width = right - left + height = bot - top + + if preserve_aspect: + width_increase = max(increase_area, ((1 + 2 * increase_area) * height - width) / (2 * width)) + height_increase = max(increase_area, ((1 + 2 * increase_area) * width - height) / (2 * height)) + else: + width_increase = height_increase = increase_area + left = int(left - width_increase * width) + top = int(top - height_increase * height) + right = int(right + width_increase * width) + bot = int(bot + height_increase * height) + return (left, top, right, bot) + + +def get_valid_bboxes(bboxes, h, w): + left = max(bboxes[0], 0) + top = max(bboxes[1], 0) + right = min(bboxes[2], w) + bottom = min(bboxes[3], h) + return (left, top, right, bottom) + + +def align_crop_face_landmarks(img, + landmarks, + output_size, + transform_size=None, + enable_padding=True, + return_inverse_affine=False, + shrink_ratio=(1, 1)): + """Align and crop face with landmarks. 
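+ + Eye and mouth anchor points are taken from the 5-, 68-, or 98-point + landmark set, following the FFHQ alignment convention.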
+ + The output_size and transform_size are based on width. The height is + adjusted based on the height and width components of shrink_ratio. + + Modified from: + https://github.com/NVlabs/ffhq-dataset/blob/master/download_ffhq.py + + Args: + img (Numpy array): Input image. + landmarks (Numpy array): 5 or 68 or 98 landmarks. + output_size (int): Output face size. + transform_size (int): Transform size. Usually four times the + output_size. + enable_padding (bool): Whether to pad the image. Default: True. + shrink_ratio (float | tuple[float] | list[float]): Shrink the whole + face for height and width (crop larger area). Default: (1, 1). + + Returns: + (Numpy array): Cropped face. + """ + lm_type = 'retinaface_5' # Options: dlib_5, retinaface_5 + + if isinstance(shrink_ratio, (float, int)): + shrink_ratio = (shrink_ratio, shrink_ratio) + if transform_size is None: + transform_size = output_size * 4 + + # Parse landmarks + lm = np.array(landmarks) + if lm.shape[0] == 5 and lm_type == 'retinaface_5': + eye_left = lm[0] + eye_right = lm[1] + mouth_avg = (lm[3] + lm[4]) * 0.5 + elif lm.shape[0] == 5 and lm_type == 'dlib_5': + lm_eye_left = lm[2:4] + lm_eye_right = lm[0:2] + eye_left = np.mean(lm_eye_left, axis=0) + eye_right = np.mean(lm_eye_right, axis=0) + mouth_avg = lm[4] + elif lm.shape[0] == 68: + lm_eye_left = lm[36:42] + lm_eye_right = lm[42:48] + eye_left = np.mean(lm_eye_left, axis=0) + eye_right = np.mean(lm_eye_right, axis=0) + mouth_avg = (lm[48] + lm[54]) * 0.5 + elif lm.shape[0] == 98: + lm_eye_left = lm[60:68] + lm_eye_right = lm[68:76] + eye_left = np.mean(lm_eye_left, axis=0) + eye_right = np.mean(lm_eye_right, axis=0) + mouth_avg = (lm[76] + lm[82]) * 0.5 + + eye_avg = (eye_left + eye_right) * 0.5 + eye_to_eye = eye_right - eye_left + eye_to_mouth = mouth_avg - eye_avg + + # Get the oriented crop rectangle + # x: half width of the oriented crop rectangle + x = eye_to_eye - np.flipud(eye_to_mouth) * [-1, 1] + # - np.flipud(eye_to_mouth) * [-1, 1]: rotate 90 clockwise + # norm with the hypotenuse: get the direction + x /= np.hypot(*x) # get the hypotenuse of a right triangle + rect_scale = 1 # TODO: you can edit it to get larger rect + x *= max(np.hypot(*eye_to_eye) * 2.0 * rect_scale, np.hypot(*eye_to_mouth) * 1.8 * rect_scale) + # y: half height of the oriented crop rectangle + y = np.flipud(x) * [-1, 1] + + x *= shrink_ratio[1] # width + y *= shrink_ratio[0] # height + + # c: center + c = eye_avg + eye_to_mouth * 0.1 + # quad: (left_top, left_bottom, right_bottom, right_top) + quad = np.stack([c - x - y, c - x + y, c + x + y, c + x - y]) + # qsize: side length of the square + qsize = np.hypot(*x) * 2 + + quad_ori = np.copy(quad) + # Shrink, for large face + # TODO: do we really need shrink + shrink = int(np.floor(qsize / output_size * 0.5)) + if shrink > 1: + h, w = img.shape[0:2] + rsize = (int(np.rint(float(w) / shrink)), int(np.rint(float(h) / shrink))) + img = cv2.resize(img, rsize, interpolation=cv2.INTER_AREA) + quad /= shrink + qsize /= shrink + + # Crop + h, w = img.shape[0:2] + border = max(int(np.rint(qsize * 0.1)), 3) + crop = (int(np.floor(min(quad[:, 0]))), int(np.floor(min(quad[:, 1]))), int(np.ceil(max(quad[:, 0]))), + int(np.ceil(max(quad[:, 1])))) + crop = (max(crop[0] - border, 0), max(crop[1] - border, 0), min(crop[2] + border, w), min(crop[3] + border, h)) + if crop[2] - crop[0] < w or crop[3] - crop[1] < h: + img = img[crop[1]:crop[3], crop[0]:crop[2], :] + quad -= crop[0:2] + + # Pad + # pad: (width_left, height_top, width_right, height_bottom) + h, w = img.shape[0:2] + pad = 
(int(np.floor(min(quad[:, 0]))), int(np.floor(min(quad[:, 1]))), int(np.ceil(max(quad[:, 0]))), + int(np.ceil(max(quad[:, 1])))) + pad = (max(-pad[0] + border, 0), max(-pad[1] + border, 0), max(pad[2] - w + border, 0), max(pad[3] - h + border, 0)) + if enable_padding and max(pad) > border - 4: + pad = np.maximum(pad, int(np.rint(qsize * 0.3))) + img = np.pad(img, ((pad[1], pad[3]), (pad[0], pad[2]), (0, 0)), 'reflect') + h, w = img.shape[0:2] + y, x, _ = np.ogrid[:h, :w, :1] + mask = np.maximum(1.0 - np.minimum(np.float32(x) / pad[0], + np.float32(w - 1 - x) / pad[2]), + 1.0 - np.minimum(np.float32(y) / pad[1], + np.float32(h - 1 - y) / pad[3])) + blur = int(qsize * 0.02) + if blur % 2 == 0: + blur += 1 + blur_img = cv2.boxFilter(img, 0, ksize=(blur, blur)) + + img = img.astype('float32') + img += (blur_img - img) * np.clip(mask * 3.0 + 1.0, 0.0, 1.0) + img += (np.median(img, axis=(0, 1)) - img) * np.clip(mask, 0.0, 1.0) + img = np.clip(img, 0, 255) # float32, [0, 255] + quad += pad[:2] + + # Transform use cv2 + h_ratio = shrink_ratio[0] / shrink_ratio[1] + dst_h, dst_w = int(transform_size * h_ratio), transform_size + template = np.array([[0, 0], [0, dst_h], [dst_w, dst_h], [dst_w, 0]]) + # use cv2.LMEDS method for the equivalence to skimage transform + # ref: https://blog.csdn.net/yichxi/article/details/115827338 + affine_matrix = cv2.estimateAffinePartial2D(quad, template, method=cv2.LMEDS)[0] + cropped_face = cv2.warpAffine( + img, affine_matrix, (dst_w, dst_h), borderMode=cv2.BORDER_CONSTANT, borderValue=(135, 133, 132)) # gray + + if output_size < transform_size: + cropped_face = cv2.resize( + cropped_face, (output_size, int(output_size * h_ratio)), interpolation=cv2.INTER_LINEAR) + + if return_inverse_affine: + dst_h, dst_w = int(output_size * h_ratio), output_size + template = np.array([[0, 0], [0, dst_h], [dst_w, dst_h], [dst_w, 0]]) + # use cv2.LMEDS method for the equivalence to skimage transform + # ref: https://blog.csdn.net/yichxi/article/details/115827338 + affine_matrix = cv2.estimateAffinePartial2D( + quad_ori, np.array([[0, 0], [0, output_size], [dst_w, dst_h], [dst_w, 0]]), method=cv2.LMEDS)[0] + inverse_affine = cv2.invertAffineTransform(affine_matrix) + else: + inverse_affine = None + return cropped_face, inverse_affine + + +def paste_face_back(img, face, inverse_affine): + h, w = img.shape[0:2] + face_h, face_w = face.shape[0:2] + inv_restored = cv2.warpAffine(face, inverse_affine, (w, h)) + mask = np.ones((face_h, face_w, 3), dtype=np.float32) + inv_mask = cv2.warpAffine(mask, inverse_affine, (w, h)) + # remove the black borders + inv_mask_erosion = cv2.erode(inv_mask, np.ones((2, 2), np.uint8)) + inv_restored_remove_border = inv_mask_erosion * inv_restored + total_face_area = np.sum(inv_mask_erosion) // 3 + # compute the fusion edge based on the area of face + w_edge = int(total_face_area**0.5) // 20 + erosion_radius = w_edge * 2 + inv_mask_center = cv2.erode(inv_mask_erosion, np.ones((erosion_radius, erosion_radius), np.uint8)) + blur_size = w_edge * 2 + inv_soft_mask = cv2.GaussianBlur(inv_mask_center, (blur_size + 1, blur_size + 1), 0) + img = inv_soft_mask * inv_restored_remove_border + (1 - inv_soft_mask) * img + # float32, [0, 255] + return img + + +if __name__ == '__main__': + import os + + from extras.facexlib.detection import init_detection_model + from extras.facexlib.utils.face_restoration_helper import get_largest_face + from extras.facexlib.visualization import visualize_detection + + img_path = '/home/wxt/datasets/ffhq/ffhq_wild/00009.png' + img_name = 
os.path.splitext(os.path.basename(img_path))[0] + + # initialize model + det_net = init_detection_model('retinaface_resnet50', half=False) + img_ori = cv2.imread(img_path) + h, w = img_ori.shape[0:2] + # if larger than 800, scale it + scale = max(h / 800, w / 800) + if scale > 1: + img = cv2.resize(img_ori, (int(w / scale), int(h / scale)), interpolation=cv2.INTER_LINEAR) + + with torch.no_grad(): + bboxes = det_net.detect_faces(img, 0.97) + if scale > 1: + bboxes *= scale # the score is incorrect + bboxes = get_largest_face(bboxes, h, w)[0] + visualize_detection(img_ori, [bboxes], f'tmp/{img_name}_det.png') + + landmarks = np.array([[bboxes[i], bboxes[i + 1]] for i in range(5, 15, 2)]) + + cropped_face, inverse_affine = align_crop_face_landmarks( + img_ori, + landmarks, + output_size=512, + transform_size=None, + enable_padding=True, + return_inverse_affine=True, + shrink_ratio=(1, 1)) + + cv2.imwrite(f'tmp/{img_name}_cropped_face.png', cropped_face) + img = paste_face_back(img_ori, cropped_face, inverse_affine) + cv2.imwrite(f'tmp/{img_name}_back.png', img) diff --git a/extras/facexlib/utils/misc.py b/extras/facexlib/utils/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..b1a597ce7ecc476ef74ee3eb83d5fdfdfdbf0679 --- /dev/null +++ b/extras/facexlib/utils/misc.py @@ -0,0 +1,118 @@ +import cv2 +import os +import os.path as osp +import torch +from torch.hub import download_url_to_file, get_dir +from urllib.parse import urlparse + +ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + + +def imwrite(img, file_path, params=None, auto_mkdir=True): + """Write image to file. + + Args: + img (ndarray): Image array to be written. + file_path (str): Image file path. + params (None or list): Same as opencv's :func:`imwrite` interface. + auto_mkdir (bool): If the parent folder of `file_path` does not exist, + whether to create it automatically. + + Returns: + bool: Successful or not. + """ + if auto_mkdir: + dir_name = os.path.abspath(os.path.dirname(file_path)) + os.makedirs(dir_name, exist_ok=True) + return cv2.imwrite(file_path, img, params) + + +def img2tensor(imgs, bgr2rgb=True, float32=True): + """Numpy array to tensor. + + Args: + imgs (list[ndarray] | ndarray): Input images. + bgr2rgb (bool): Whether to change bgr to rgb. + float32 (bool): Whether to change to float32. + + Returns: + list[tensor] | tensor: Tensor images. If returned results only have + one element, just return tensor. 
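+ + Example (illustrative shape check): + >>> import numpy as np + >>> t = img2tensor(np.zeros((64, 64, 3), dtype=np.float32)) + >>> tuple(t.shape) + (3, 64, 64)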
+ """ + + def _totensor(img, bgr2rgb, float32): + if img.shape[2] == 3 and bgr2rgb: + if img.dtype == 'float64': + img = img.astype('float32') + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) + img = torch.from_numpy(img.transpose(2, 0, 1)) + if float32: + img = img.float() + return img + + if isinstance(imgs, list): + return [_totensor(img, bgr2rgb, float32) for img in imgs] + else: + return _totensor(imgs, bgr2rgb, float32) + + +def load_file_from_url(url, model_dir=None, progress=True, file_name=None, save_dir=None): + """Ref:https://github.com/1adrianb/face-alignment/blob/master/face_alignment/utils.py + """ + if model_dir is None: + hub_dir = get_dir() + model_dir = os.path.join(hub_dir, 'checkpoints') + + if save_dir is None: + save_dir = os.path.join(ROOT_DIR, model_dir) + os.makedirs(save_dir, exist_ok=True) + + parts = urlparse(url) + filename = os.path.basename(parts.path) + if file_name is not None: + filename = file_name + cached_file = os.path.abspath(os.path.join(save_dir, filename)) + if not os.path.exists(cached_file): + print(f'Downloading: "{url}" to {cached_file}\n') + download_url_to_file(url, cached_file, hash_prefix=None, progress=progress) + return cached_file + + +def scandir(dir_path, suffix=None, recursive=False, full_path=False): + """Scan a directory to find the interested files. + Args: + dir_path (str): Path of the directory. + suffix (str | tuple(str), optional): File suffix that we are + interested in. Default: None. + recursive (bool, optional): If set to True, recursively scan the + directory. Default: False. + full_path (bool, optional): If set to True, include the dir_path. + Default: False. + Returns: + A generator for all the interested files with relative paths. + """ + + if (suffix is not None) and not isinstance(suffix, (str, tuple)): + raise TypeError('"suffix" must be a string or tuple of strings') + + root = dir_path + + def _scandir(dir_path, suffix, recursive): + for entry in os.scandir(dir_path): + if not entry.name.startswith('.') and entry.is_file(): + if full_path: + return_path = entry.path + else: + return_path = osp.relpath(entry.path, root) + + if suffix is None: + yield return_path + elif return_path.endswith(suffix): + yield return_path + else: + if recursive: + yield from _scandir(entry.path, suffix=suffix, recursive=recursive) + else: + continue + + return _scandir(dir_path, suffix=suffix, recursive=recursive) diff --git a/extras/interrogate.py b/extras/interrogate.py new file mode 100644 index 0000000000000000000000000000000000000000..410d685f649421f594453ae01ee71c1f88ac2222 --- /dev/null +++ b/extras/interrogate.py @@ -0,0 +1,63 @@ +import os +import torch +import ldm_patched.modules.model_management as model_management + +from torchvision import transforms +from torchvision.transforms.functional import InterpolationMode +from modules.model_loader import load_file_from_url +from modules.config import path_clip_vision +from ldm_patched.modules.model_patcher import ModelPatcher +from extras.BLIP.models.blip import blip_decoder + + +blip_image_eval_size = 384 +blip_repo_root = os.path.join(os.path.dirname(__file__), 'BLIP') + + +class Interrogator: + def __init__(self): + self.blip_model = None + self.load_device = torch.device('cpu') + self.offload_device = torch.device('cpu') + self.dtype = torch.float32 + + @torch.no_grad() + @torch.inference_mode() + def interrogate(self, img_rgb): + if self.blip_model is None: + filename = load_file_from_url( + 
url='https://huggingface.co/lllyasviel/misc/resolve/main/model_base_caption_capfilt_large.pth', + model_dir=path_clip_vision, + file_name='model_base_caption_capfilt_large.pth', + ) + + model = blip_decoder(pretrained=filename, image_size=blip_image_eval_size, vit='base', + med_config=os.path.join(blip_repo_root, "configs", "med_config.json")) + model.eval() + + self.load_device = model_management.text_encoder_device() + self.offload_device = model_management.text_encoder_offload_device() + self.dtype = torch.float32 + + model.to(self.offload_device) + + if model_management.should_use_fp16(device=self.load_device): + model.half() + self.dtype = torch.float16 + + self.blip_model = ModelPatcher(model, load_device=self.load_device, offload_device=self.offload_device) + + model_management.load_model_gpu(self.blip_model) + + gpu_image = transforms.Compose([ + transforms.ToTensor(), + transforms.Resize((blip_image_eval_size, blip_image_eval_size), interpolation=InterpolationMode.BICUBIC), + transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711)) + ])(img_rgb).unsqueeze(0).to(device=self.load_device, dtype=self.dtype) + + caption = self.blip_model.model.generate(gpu_image, sample=True, num_beams=1, max_length=75)[0] + + return caption + + +default_interrogator = Interrogator().interrogate diff --git a/extras/ip_adapter.py b/extras/ip_adapter.py new file mode 100644 index 0000000000000000000000000000000000000000..22527d244954db606b5eff72e30037938b0cab2d --- /dev/null +++ b/extras/ip_adapter.py @@ -0,0 +1,284 @@ +import torch +import ldm_patched.modules.clip_vision +import safetensors.torch as sf +import ldm_patched.modules.model_management as model_management +import ldm_patched.ldm.modules.attention as attention + +from extras.resampler import Resampler +from ldm_patched.modules.model_patcher import ModelPatcher +from modules.core import numpy_to_pytorch +from modules.ops import use_patched_ops +from ldm_patched.modules.ops import manual_cast + + +SD_V12_CHANNELS = [320] * 4 + [640] * 4 + [1280] * 4 + [1280] * 6 + [640] * 6 + [320] * 6 + [1280] * 2 +SD_XL_CHANNELS = [640] * 8 + [1280] * 40 + [1280] * 60 + [640] * 12 + [1280] * 20 + + +def sdp(q, k, v, extra_options): + return attention.optimized_attention(q, k, v, heads=extra_options["n_heads"], mask=None) + + +class ImageProjModel(torch.nn.Module): + def __init__(self, cross_attention_dim=1024, clip_embeddings_dim=1024, clip_extra_context_tokens=4): + super().__init__() + + self.cross_attention_dim = cross_attention_dim + self.clip_extra_context_tokens = clip_extra_context_tokens + self.proj = torch.nn.Linear(clip_embeddings_dim, self.clip_extra_context_tokens * cross_attention_dim) + self.norm = torch.nn.LayerNorm(cross_attention_dim) + + def forward(self, image_embeds): + embeds = image_embeds + clip_extra_context_tokens = self.proj(embeds).reshape(-1, self.clip_extra_context_tokens, + self.cross_attention_dim) + clip_extra_context_tokens = self.norm(clip_extra_context_tokens) + return clip_extra_context_tokens + + +class To_KV(torch.nn.Module): + def __init__(self, cross_attention_dim): + super().__init__() + + channels = SD_XL_CHANNELS if cross_attention_dim == 2048 else SD_V12_CHANNELS + self.to_kvs = torch.nn.ModuleList( + [torch.nn.Linear(cross_attention_dim, channel, bias=False) for channel in channels]) + + def load_state_dict_ordered(self, sd): + state_dict = [] + for i in range(4096): + for k in ['k', 'v']: + key = f'{i}.to_{k}_ip.weight' + if key in sd: + state_dict.append(sd[key]) + for i, v 
in enumerate(state_dict): + self.to_kvs[i].weight = torch.nn.Parameter(v, requires_grad=False) + + +class IPAdapterModel(torch.nn.Module): + def __init__(self, state_dict, plus, cross_attention_dim=768, clip_embeddings_dim=1024, clip_extra_context_tokens=4, + sdxl_plus=False): + super().__init__() + self.plus = plus + if self.plus: + self.image_proj_model = Resampler( + dim=1280 if sdxl_plus else cross_attention_dim, + depth=4, + dim_head=64, + heads=20 if sdxl_plus else 12, + num_queries=clip_extra_context_tokens, + embedding_dim=clip_embeddings_dim, + output_dim=cross_attention_dim, + ff_mult=4 + ) + else: + self.image_proj_model = ImageProjModel( + cross_attention_dim=cross_attention_dim, + clip_embeddings_dim=clip_embeddings_dim, + clip_extra_context_tokens=clip_extra_context_tokens + ) + + self.image_proj_model.load_state_dict(state_dict["image_proj"]) + self.ip_layers = To_KV(cross_attention_dim) + self.ip_layers.load_state_dict_ordered(state_dict["ip_adapter"]) + + +clip_vision: ldm_patched.modules.clip_vision.ClipVisionModel = None +ip_negative: torch.Tensor = None +ip_adapters: dict = {} + + +def load_ip_adapter(clip_vision_path, ip_negative_path, ip_adapter_path): + global clip_vision, ip_negative, ip_adapters + + if clip_vision is None and isinstance(clip_vision_path, str): + clip_vision = ldm_patched.modules.clip_vision.load(clip_vision_path) + + if ip_negative is None and isinstance(ip_negative_path, str): + ip_negative = sf.load_file(ip_negative_path)['data'] + + if not isinstance(ip_adapter_path, str) or ip_adapter_path in ip_adapters: + return + + load_device = model_management.get_torch_device() + offload_device = torch.device('cpu') + + use_fp16 = model_management.should_use_fp16(device=load_device) + ip_state_dict = torch.load(ip_adapter_path, map_location="cpu") + plus = "latents" in ip_state_dict["image_proj"] + cross_attention_dim = ip_state_dict["ip_adapter"]["1.to_k_ip.weight"].shape[1] + sdxl = cross_attention_dim == 2048 + sdxl_plus = sdxl and plus + + if plus: + clip_extra_context_tokens = ip_state_dict["image_proj"]["latents"].shape[1] + clip_embeddings_dim = ip_state_dict["image_proj"]["latents"].shape[2] + else: + clip_extra_context_tokens = ip_state_dict["image_proj"]["proj.weight"].shape[0] // cross_attention_dim + clip_embeddings_dim = None + + with use_patched_ops(manual_cast): + ip_adapter = IPAdapterModel( + ip_state_dict, + plus=plus, + cross_attention_dim=cross_attention_dim, + clip_embeddings_dim=clip_embeddings_dim, + clip_extra_context_tokens=clip_extra_context_tokens, + sdxl_plus=sdxl_plus + ) + + ip_adapter.sdxl = sdxl + ip_adapter.load_device = load_device + ip_adapter.offload_device = offload_device + ip_adapter.dtype = torch.float16 if use_fp16 else torch.float32 + ip_adapter.to(offload_device, dtype=ip_adapter.dtype) + + image_proj_model = ModelPatcher(model=ip_adapter.image_proj_model, load_device=load_device, + offload_device=offload_device) + ip_layers = ModelPatcher(model=ip_adapter.ip_layers, load_device=load_device, + offload_device=offload_device) + + ip_adapters[ip_adapter_path] = dict( + ip_adapter=ip_adapter, + image_proj_model=image_proj_model, + ip_layers=ip_layers, + ip_unconds=None + ) + + return + + +@torch.no_grad() +@torch.inference_mode() +def clip_preprocess(image): + mean = torch.tensor([0.48145466, 0.4578275, 0.40821073], device=image.device, dtype=image.dtype).view([1, 3, 1, 1]) + std = torch.tensor([0.26862954, 0.26130258, 0.27577711], device=image.device, dtype=image.dtype).view([1, 3, 1, 1]) + image = image.movedim(-1, 
1) + + # https://github.com/tencent-ailab/IP-Adapter/blob/d580c50a291566bbf9fc7ac0f760506607297e6d/README.md?plain=1#L75 + B, C, H, W = image.shape + assert H == 224 and W == 224 + + return (image - mean) / std + + +@torch.no_grad() +@torch.inference_mode() +def preprocess(img, ip_adapter_path): + global ip_adapters + entry = ip_adapters[ip_adapter_path] + + ldm_patched.modules.model_management.load_model_gpu(clip_vision.patcher) + pixel_values = clip_preprocess(numpy_to_pytorch(img).to(clip_vision.load_device)) + outputs = clip_vision.model(pixel_values=pixel_values, output_hidden_states=True) + + ip_adapter = entry['ip_adapter'] + ip_layers = entry['ip_layers'] + image_proj_model = entry['image_proj_model'] + ip_unconds = entry['ip_unconds'] + + if ip_adapter.plus: + cond = outputs.hidden_states[-2] + else: + cond = outputs.image_embeds + + cond = cond.to(device=ip_adapter.load_device, dtype=ip_adapter.dtype) + + ldm_patched.modules.model_management.load_model_gpu(image_proj_model) + cond = image_proj_model.model(cond).to(device=ip_adapter.load_device, dtype=ip_adapter.dtype) + + ldm_patched.modules.model_management.load_model_gpu(ip_layers) + + if ip_unconds is None: + uncond = ip_negative.to(device=ip_adapter.load_device, dtype=ip_adapter.dtype) + ip_unconds = [m(uncond).cpu() for m in ip_layers.model.to_kvs] + entry['ip_unconds'] = ip_unconds + + ip_conds = [m(cond).cpu() for m in ip_layers.model.to_kvs] + + return ip_conds, ip_unconds + + +@torch.no_grad() +@torch.inference_mode() +def patch_model(model, tasks): + new_model = model.clone() + + def make_attn_patcher(ip_index): + def patcher(n, context_attn2, value_attn2, extra_options): + org_dtype = n.dtype + current_step = float(model.model.diffusion_model.current_step.detach().cpu().numpy()[0]) + cond_or_uncond = extra_options['cond_or_uncond'] + + q = n + k = [context_attn2] + v = [value_attn2] + b, _, _ = q.shape + + for (cs, ucs), cn_stop, cn_weight in tasks: + if current_step < cn_stop: + ip_k_c = cs[ip_index * 2].to(q) + ip_v_c = cs[ip_index * 2 + 1].to(q) + ip_k_uc = ucs[ip_index * 2].to(q) + ip_v_uc = ucs[ip_index * 2 + 1].to(q) + + ip_k = torch.cat([(ip_k_c, ip_k_uc)[i] for i in cond_or_uncond], dim=0) + ip_v = torch.cat([(ip_v_c, ip_v_uc)[i] for i in cond_or_uncond], dim=0) + + # Midjourney's attention formulation of image prompt (non-official reimplementation) + # Written by Lvmin Zhang at Stanford University, 2023 Dec + # For non-commercial use only - if you use this in commercial project then + # probably it has some intellectual property issues. + # Contact lvminzhang@acm.org if you are not sure. + + # Below is the sensitive part with potential intellectual property issues. 
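+ # In short: the image-prompt keys are scaled by a channel-dependent weight, + # while for the values only the mean component is reweighted and the + # zero-mean residual (ip_v_offset) is left intact.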
+ + ip_v_mean = torch.mean(ip_v, dim=1, keepdim=True) + ip_v_offset = ip_v - ip_v_mean + + B, F, C = ip_k.shape + channel_penalty = float(C) / 1280.0 + weight = cn_weight * channel_penalty + + ip_k = ip_k * weight + ip_v = ip_v_offset + ip_v_mean * weight + + k.append(ip_k) + v.append(ip_v) + + k = torch.cat(k, dim=1) + v = torch.cat(v, dim=1) + out = sdp(q, k, v, extra_options) + + + return out.to(dtype=org_dtype) + return patcher + + def set_model_patch_replace(model, number, key): + to = model.model_options["transformer_options"] + if "patches_replace" not in to: + to["patches_replace"] = {} + if "attn2" not in to["patches_replace"]: + to["patches_replace"]["attn2"] = {} + if key not in to["patches_replace"]["attn2"]: + to["patches_replace"]["attn2"][key] = make_attn_patcher(number) + + number = 0 + + for id in [4, 5, 7, 8]: + block_indices = range(2) if id in [4, 5] else range(10) + for index in block_indices: + set_model_patch_replace(new_model, number, ("input", id, index)) + number += 1 + + for id in range(6): + block_indices = range(2) if id in [3, 4, 5] else range(10) + for index in block_indices: + set_model_patch_replace(new_model, number, ("output", id, index)) + number += 1 + + for index in range(10): + set_model_patch_replace(new_model, number, ("middle", 0, index)) + number += 1 + + return new_model diff --git a/extras/preprocessors.py b/extras/preprocessors.py new file mode 100644 index 0000000000000000000000000000000000000000..0aa83109aac2b1228317ea12ec22d5f8bb9de8d0 --- /dev/null +++ b/extras/preprocessors.py @@ -0,0 +1,81 @@ +import cv2 +import numpy as np + + +def centered_canny(x: np.ndarray, canny_low_threshold, canny_high_threshold): + assert isinstance(x, np.ndarray) + assert x.ndim == 2 and x.dtype == np.uint8 + + y = cv2.Canny(x, int(canny_low_threshold), int(canny_high_threshold)) + y = y.astype(np.float32) / 255.0 + return y + + +def centered_canny_color(x: np.ndarray, canny_low_threshold, canny_high_threshold): + assert isinstance(x, np.ndarray) + assert x.ndim == 3 and x.shape[2] == 3 + + result = [centered_canny(x[..., i], canny_low_threshold, canny_high_threshold) for i in range(3)] + result = np.stack(result, axis=2) + return result + + +def pyramid_canny_color(x: np.ndarray, canny_low_threshold, canny_high_threshold): + assert isinstance(x, np.ndarray) + assert x.ndim == 3 and x.shape[2] == 3 + + H, W, C = x.shape + acc_edge = None + + for k in [0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]: + Hs, Ws = int(H * k), int(W * k) + small = cv2.resize(x, (Ws, Hs), interpolation=cv2.INTER_AREA) + edge = centered_canny_color(small, canny_low_threshold, canny_high_threshold) + if acc_edge is None: + acc_edge = edge + else: + acc_edge = cv2.resize(acc_edge, (edge.shape[1], edge.shape[0]), interpolation=cv2.INTER_LINEAR) + acc_edge = acc_edge * 0.75 + edge * 0.25 + + return acc_edge + + +def norm255(x, low=4, high=96): + assert isinstance(x, np.ndarray) + assert x.ndim == 2 and x.dtype == np.float32 + + v_min = np.percentile(x, low) + v_max = np.percentile(x, high) + + x -= v_min + x /= v_max - v_min + + return x * 255.0 + + +def canny_pyramid(x, canny_low_threshold, canny_high_threshold): + # For some reasons, SAI's Control-lora Canny seems to be trained on canny maps with non-standard resolutions. + # Then we use pyramid to use all resolutions to avoid missing any structure in specific resolutions. 
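+ # Concretely: per-channel Canny maps are computed at nine scales (0.2x-1.0x), + # each finer map is blended into the upsampled accumulator (0.75 / 0.25), the + # three channels are summed, and a 1-99 percentile stretch maps the result to uint8.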
+ + color_canny = pyramid_canny_color(x, canny_low_threshold, canny_high_threshold) + result = np.sum(color_canny, axis=2) + + return norm255(result, low=1, high=99).clip(0, 255).astype(np.uint8) + + +def cpds(x): + # cv2.decolor is not "decolor", it is Cewu Lu's method + # See http://www.cse.cuhk.edu.hk/leojia/projects/color2gray/index.html + # See https://docs.opencv.org/3.0-beta/modules/photo/doc/decolor.html + + raw = cv2.GaussianBlur(x, (0, 0), 0.8) + density, boost = cv2.decolor(raw) + + raw = raw.astype(np.float32) + density = density.astype(np.float32) + boost = boost.astype(np.float32) + + offset = np.sum((raw - boost) ** 2.0, axis=2) ** 0.5 + result = density + offset + + return norm255(result, low=4, high=96).clip(0, 255).astype(np.uint8) diff --git a/extras/resampler.py b/extras/resampler.py new file mode 100644 index 0000000000000000000000000000000000000000..539f309d4621cc432ec2937c48ef24b852652b8f --- /dev/null +++ b/extras/resampler.py @@ -0,0 +1,120 @@ +# modified from https://github.com/mlfoundations/open_flamingo/blob/main/open_flamingo/src/helpers.py +import math + +import torch +import torch.nn as nn + + +# FFN +def FeedForward(dim, mult=4): + inner_dim = int(dim * mult) + return nn.Sequential( + nn.LayerNorm(dim), + nn.Linear(dim, inner_dim, bias=False), + nn.GELU(), + nn.Linear(inner_dim, dim, bias=False), + ) + + +def reshape_tensor(x, heads): + bs, length, width = x.shape + #(bs, length, width) --> (bs, length, n_heads, dim_per_head) + x = x.view(bs, length, heads, -1) + # (bs, length, n_heads, dim_per_head) --> (bs, n_heads, length, dim_per_head) + x = x.transpose(1, 2) + # (bs, n_heads, length, dim_per_head) --> (bs*n_heads, length, dim_per_head) + x = x.reshape(bs, heads, length, -1) + return x + + +class PerceiverAttention(nn.Module): + def __init__(self, *, dim, dim_head=64, heads=8): + super().__init__() + self.scale = dim_head**-0.5 + self.dim_head = dim_head + self.heads = heads + inner_dim = dim_head * heads + + self.norm1 = nn.LayerNorm(dim) + self.norm2 = nn.LayerNorm(dim) + + self.to_q = nn.Linear(dim, inner_dim, bias=False) + self.to_kv = nn.Linear(dim, inner_dim * 2, bias=False) + self.to_out = nn.Linear(inner_dim, dim, bias=False) + + + def forward(self, x, latents): + """ + Args: + x (torch.Tensor): image features + shape (b, n1, D) + latent (torch.Tensor): latent features + shape (b, n2, D) + """ + x = self.norm1(x) + latents = self.norm2(latents) + + b, l, _ = latents.shape + + q = self.to_q(latents) + kv_input = torch.cat((x, latents), dim=-2) + k, v = self.to_kv(kv_input).chunk(2, dim=-1) + + q = reshape_tensor(q, self.heads) + k = reshape_tensor(k, self.heads) + v = reshape_tensor(v, self.heads) + + # attention + scale = 1 / math.sqrt(math.sqrt(self.dim_head)) + weight = (q * scale) @ (k * scale).transpose(-2, -1) # More stable with f16 than dividing afterwards + weight = torch.softmax(weight.float(), dim=-1).type(weight.dtype) + out = weight @ v + + out = out.permute(0, 2, 1, 3).reshape(b, l, -1) + + return self.to_out(out) + + +class Resampler(nn.Module): + def __init__( + self, + dim=1024, + depth=8, + dim_head=64, + heads=16, + num_queries=8, + embedding_dim=768, + output_dim=1024, + ff_mult=4, + ): + super().__init__() + + self.latents = nn.Parameter(torch.randn(1, num_queries, dim) / dim**0.5) + + self.proj_in = nn.Linear(embedding_dim, dim) + + self.proj_out = nn.Linear(dim, output_dim) + self.norm_out = nn.LayerNorm(output_dim) + + self.layers = nn.ModuleList([]) + for _ in range(depth): + self.layers.append( + nn.ModuleList( + [ + 
PerceiverAttention(dim=dim, dim_head=dim_head, heads=heads), + FeedForward(dim=dim, mult=ff_mult), + ] + ) + ) + + def forward(self, x): + latents = self.latents.repeat(x.size(0), 1, 1).to(x) + + x = self.proj_in(x) + + for attn, ff in self.layers: + latents = attn(x, latents) + latents + latents = ff(latents) + latents + + latents = self.proj_out(latents) + return self.norm_out(latents) diff --git a/extras/vae_interpose.py b/extras/vae_interpose.py new file mode 100644 index 0000000000000000000000000000000000000000..72fb09a415b3d283c1cdf10902ebe337c22c10a5 --- /dev/null +++ b/extras/vae_interpose.py @@ -0,0 +1,93 @@ +# https://github.com/city96/SD-Latent-Interposer/blob/main/interposer.py + +import os +import torch +import safetensors.torch as sf +import torch.nn as nn +import ldm_patched.modules.model_management + +from ldm_patched.modules.model_patcher import ModelPatcher +from modules.config import path_vae_approx + + +class Block(nn.Module): + def __init__(self, size): + super().__init__() + self.join = nn.ReLU() + self.long = nn.Sequential( + nn.Conv2d(size, size, kernel_size=3, stride=1, padding=1), + nn.LeakyReLU(0.1), + nn.Conv2d(size, size, kernel_size=3, stride=1, padding=1), + nn.LeakyReLU(0.1), + nn.Conv2d(size, size, kernel_size=3, stride=1, padding=1), + ) + + def forward(self, x): + y = self.long(x) + z = self.join(y + x) + return z + + +class Interposer(nn.Module): + def __init__(self): + super().__init__() + self.chan = 4 + self.hid = 128 + + self.head_join = nn.ReLU() + self.head_short = nn.Conv2d(self.chan, self.hid, kernel_size=3, stride=1, padding=1) + self.head_long = nn.Sequential( + nn.Conv2d(self.chan, self.hid, kernel_size=3, stride=1, padding=1), + nn.LeakyReLU(0.1), + nn.Conv2d(self.hid, self.hid, kernel_size=3, stride=1, padding=1), + nn.LeakyReLU(0.1), + nn.Conv2d(self.hid, self.hid, kernel_size=3, stride=1, padding=1), + ) + self.core = nn.Sequential( + Block(self.hid), + Block(self.hid), + Block(self.hid), + ) + self.tail = nn.Sequential( + nn.ReLU(), + nn.Conv2d(self.hid, self.chan, kernel_size=3, stride=1, padding=1) + ) + + def forward(self, x): + y = self.head_join( + self.head_long(x) + + self.head_short(x) + ) + z = self.core(y) + return self.tail(z) + + +vae_approx_model = None +vae_approx_filename = os.path.join(path_vae_approx, 'xl-to-v1_interposer-v3.1.safetensors') + + +def parse(x): + global vae_approx_model + + x_origin = x.clone() + + if vae_approx_model is None: + model = Interposer() + model.eval() + sd = sf.load_file(vae_approx_filename) + model.load_state_dict(sd) + fp16 = ldm_patched.modules.model_management.should_use_fp16() + if fp16: + model = model.half() + vae_approx_model = ModelPatcher( + model=model, + load_device=ldm_patched.modules.model_management.get_torch_device(), + offload_device=torch.device('cpu') + ) + vae_approx_model.dtype = torch.float16 if fp16 else torch.float32 + + ldm_patched.modules.model_management.load_model_gpu(vae_approx_model) + + x = x_origin.to(device=vae_approx_model.load_device, dtype=vae_approx_model.dtype) + x = vae_approx_model.model(x).to(x_origin) + return x diff --git a/extras/wd14tagger.py b/extras/wd14tagger.py new file mode 100644 index 0000000000000000000000000000000000000000..368c13dfa7cecefa655097602a5b9e249ceedaef --- /dev/null +++ b/extras/wd14tagger.py @@ -0,0 +1,98 @@ +# https://huggingface.co/spaces/SmilingWolf/wd-v1-4-tags +# https://github.com/pythongosssss/ComfyUI-WD14-Tagger/blob/main/wd14tagger.py + +# { +# "wd-v1-4-moat-tagger-v2": 
"https://huggingface.co/SmilingWolf/wd-v1-4-moat-tagger-v2", +# "wd-v1-4-convnextv2-tagger-v2": "https://huggingface.co/SmilingWolf/wd-v1-4-convnextv2-tagger-v2", +# "wd-v1-4-convnext-tagger-v2": "https://huggingface.co/SmilingWolf/wd-v1-4-convnext-tagger-v2", +# "wd-v1-4-convnext-tagger": "https://huggingface.co/SmilingWolf/wd-v1-4-convnext-tagger", +# "wd-v1-4-vit-tagger-v2": "https://huggingface.co/SmilingWolf/wd-v1-4-vit-tagger-v2" +# } + + +import numpy as np +import csv +import onnxruntime as ort + +from PIL import Image +from onnxruntime import InferenceSession +from modules.config import path_clip_vision +from modules.model_loader import load_file_from_url + + +global_model = None +global_csv = None + + +def default_interrogator(image_rgb, threshold=0.35, character_threshold=0.85, exclude_tags=""): + global global_model, global_csv + + model_name = "wd-v1-4-moat-tagger-v2" + + model_onnx_filename = load_file_from_url( + url=f'https://huggingface.co/lllyasviel/misc/resolve/main/{model_name}.onnx', + model_dir=path_clip_vision, + file_name=f'{model_name}.onnx', + ) + + model_csv_filename = load_file_from_url( + url=f'https://huggingface.co/lllyasviel/misc/resolve/main/{model_name}.csv', + model_dir=path_clip_vision, + file_name=f'{model_name}.csv', + ) + + if global_model is not None: + model = global_model + else: + model = InferenceSession(model_onnx_filename, providers=ort.get_available_providers()) + global_model = model + + input = model.get_inputs()[0] + height = input.shape[1] + + image = Image.fromarray(image_rgb) # RGB + ratio = float(height)/max(image.size) + new_size = tuple([int(x*ratio) for x in image.size]) + image = image.resize(new_size, Image.LANCZOS) + square = Image.new("RGB", (height, height), (255, 255, 255)) + square.paste(image, ((height-new_size[0])//2, (height-new_size[1])//2)) + + image = np.array(square).astype(np.float32) + image = image[:, :, ::-1] # RGB -> BGR + image = np.expand_dims(image, 0) + + if global_csv is not None: + csv_lines = global_csv + else: + csv_lines = [] + with open(model_csv_filename) as f: + reader = csv.reader(f) + next(reader) + for row in reader: + csv_lines.append(row) + global_csv = csv_lines + + tags = [] + general_index = None + character_index = None + for line_num, row in enumerate(csv_lines): + if general_index is None and row[2] == "0": + general_index = line_num + elif character_index is None and row[2] == "4": + character_index = line_num + tags.append(row[1]) + + label_name = model.get_outputs()[0].name + probs = model.run([label_name], {input.name: image})[0] + + result = list(zip(tags, probs[0])) + + general = [item for item in result[general_index:character_index] if item[1] > threshold] + character = [item for item in result[character_index:] if item[1] > character_threshold] + + all = character + general + remove = [s.strip() for s in exclude_tags.lower().split(",")] + all = [tag for tag in all if tag[0] not in remove] + + res = ", ".join((item[0].replace("(", "\\(").replace(")", "\\)") for item in all)).replace('_', ' ') + return res diff --git a/flagged/Input/7d6ccbf63c05db4fa47fb5a60492bfdb8005ecf7/tmp7s1b7ye2.jpg b/flagged/Input/7d6ccbf63c05db4fa47fb5a60492bfdb8005ecf7/tmp7s1b7ye2.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2b36774a4d67d4799c0a9605f6de6a03c7cc56a8 Binary files /dev/null and b/flagged/Input/7d6ccbf63c05db4fa47fb5a60492bfdb8005ecf7/tmp7s1b7ye2.jpg differ diff --git a/flagged/log.csv b/flagged/log.csv new file mode 100644 index 
0000000000000000000000000000000000000000..4c5a0a7e5758293fffb75d7f172b5877ee1c4df0 --- /dev/null +++ b/flagged/log.csv @@ -0,0 +1 @@ +D:\ADITYA FILE\Developer\MICROSOFT\Microsoft Visual Studio Code\Project\Application Website\Nyxel\Flowly AI\My Project\AI Image\flagged\Input\7d6ccbf63c05db4fa47fb5a60492bfdb8005ecf7\tmp7s1b7ye2.jpg,Photograph,a person that has five bills in her hands,,,2024-03-14 11:18:06.554492 diff --git a/fooocus_colab.ipynb b/fooocus_colab.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..205dac55d4ed3aac4a1154d627465ac0e5474069 --- /dev/null +++ b/fooocus_colab.ipynb @@ -0,0 +1,35 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "VjYy0F2gZIPR" + }, + "outputs": [], + "source": [ + "!pip install pygit2==1.12.2\n", + "%cd /content\n", + "!git clone https://github.com/lllyasviel/Fooocus.git\n", + "%cd /content/Fooocus\n", + "!python entry_with_update.py --share\n" + ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "gpuType": "T4", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/fooocus_version.py b/fooocus_version.py new file mode 100644 index 0000000000000000000000000000000000000000..6c3c2c903805ba9f7ea4aa4d0b15aa1621ec3577 --- /dev/null +++ b/fooocus_version.py @@ -0,0 +1 @@ +version = '2.2.1' diff --git a/javascript/contextMenus.js b/javascript/contextMenus.js new file mode 100644 index 0000000000000000000000000000000000000000..7494674d336d03b05d1618746962c39f8cf5d440 --- /dev/null +++ b/javascript/contextMenus.js @@ -0,0 +1,166 @@ +// based on https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/v1.6.0/javascript/contextMenus.js + +var contextMenuInit = function() { + let eventListenerApplied = false; + let menuSpecs = new Map(); + + const uid = function() { + return Date.now().toString(36) + Math.random().toString(36).substring(2); + }; + + function showContextMenu(event, element, menuEntries) { + let posx = event.clientX + document.body.scrollLeft + document.documentElement.scrollLeft; + let posy = event.clientY + document.body.scrollTop + document.documentElement.scrollTop; + + let oldMenu = gradioApp().querySelector('#context-menu'); + if (oldMenu) { + oldMenu.remove(); + } + + let baseStyle = window.getComputedStyle(gradioApp().querySelector('button.selected')); + + const contextMenu = document.createElement('nav'); + contextMenu.id = "context-menu"; + contextMenu.style.background = baseStyle.background; + contextMenu.style.color = baseStyle.color; + contextMenu.style.fontFamily = baseStyle.fontFamily; + contextMenu.style.top = posy + 'px'; + contextMenu.style.left = posx + 'px'; + + const contextMenuList = document.createElement('ul'); + contextMenuList.className = 'context-menu-items'; + contextMenu.append(contextMenuList); + + menuEntries.forEach(function(entry) { + let contextMenuEntry = document.createElement('a'); + contextMenuEntry.innerHTML = entry['name']; + contextMenuEntry.addEventListener("click", function() { + entry['func'](); + }); + contextMenuList.append(contextMenuEntry); + + }); + + gradioApp().appendChild(contextMenu); + + let menuWidth = contextMenu.offsetWidth + 4; + let menuHeight = contextMenu.offsetHeight + 4; + + let windowWidth = window.innerWidth; + let windowHeight = window.innerHeight; + + if ((windowWidth - posx) < menuWidth) { + contextMenu.style.left = windowWidth - menuWidth + "px"; 
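+ // note: menuWidth/menuHeight include a 4px margin, so the pinned menu stays just inside the edge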
+ } + + if ((windowHeight - posy) < menuHeight) { + contextMenu.style.top = windowHeight - menuHeight + "px"; + } + + } + + function appendContextMenuOption(targetElementSelector, entryName, entryFunction) { + + var currentItems = menuSpecs.get(targetElementSelector); + + if (!currentItems) { + currentItems = []; + menuSpecs.set(targetElementSelector, currentItems); + } + let newItem = { + id: targetElementSelector + '_' + uid(), + name: entryName, + func: entryFunction, + isNew: true + }; + + currentItems.push(newItem); + return newItem['id']; + } + + function removeContextMenuOption(uid) { + menuSpecs.forEach(function(v) { + let index = -1; + v.forEach(function(e, ei) { + if (e['id'] == uid) { + index = ei; + } + }); + if (index >= 0) { + v.splice(index, 1); + } + }); + } + + function addContextMenuEventListener() { + if (eventListenerApplied) { + return; + } + gradioApp().addEventListener("click", function(e) { + if (!e.isTrusted) { + return; + } + + let oldMenu = gradioApp().querySelector('#context-menu'); + if (oldMenu) { + oldMenu.remove(); + } + }); + gradioApp().addEventListener("contextmenu", function(e) { + let oldMenu = gradioApp().querySelector('#context-menu'); + if (oldMenu) { + oldMenu.remove(); + } + menuSpecs.forEach(function(v, k) { + if (e.composedPath()[0].matches(k)) { + showContextMenu(e, e.composedPath()[0], v); + e.preventDefault(); + } + }); + }); + eventListenerApplied = true; + + } + + return [appendContextMenuOption, removeContextMenuOption, addContextMenuEventListener]; +}; + +var initResponse = contextMenuInit(); +var appendContextMenuOption = initResponse[0]; +var removeContextMenuOption = initResponse[1]; +var addContextMenuEventListener = initResponse[2]; + +let cancelGenerateForever = function() { + clearInterval(window.generateOnRepeatInterval); +}; + +(function() { + //Start example Context Menu Items + let generateOnRepeat = function(genbuttonid, interruptbuttonid) { + let genbutton = gradioApp().querySelector(genbuttonid); + let interruptbutton = gradioApp().querySelector(interruptbuttonid); + if (!interruptbutton.offsetParent) { + genbutton.click(); + } + clearInterval(window.generateOnRepeatInterval); + window.generateOnRepeatInterval = setInterval(function() { + if (!interruptbutton.offsetParent) { + genbutton.click(); + } + }, + 500); + }; + + let generateOnRepeatForButtons = function() { + generateOnRepeat('#generate_button', '#stop_button'); + }; + appendContextMenuOption('#generate_button', 'Generate forever', generateOnRepeatForButtons); + +})(); +//End example Context Menu Items + +document.onreadystatechange = function () { + if (document.readyState == "complete") { + addContextMenuEventListener(); + } +}; diff --git a/javascript/edit-attention.js b/javascript/edit-attention.js new file mode 100644 index 0000000000000000000000000000000000000000..3f38f14b52831679eed1491291e4602112f1880c --- /dev/null +++ b/javascript/edit-attention.js @@ -0,0 +1,128 @@ +function updateInput(target) { + let e = new Event("input", {bubbles: true}); + Object.defineProperty(e, "target", {value: target}); + target.dispatchEvent(e); +} + +function keyupEditAttention(event) { + let target = event.originalTarget || event.composedPath()[0]; + if (!target.matches("*:is([id*='_prompt'], .prompt) textarea")) return; + if (!(event.metaKey || event.ctrlKey)) return; + + let isPlus = event.key == "ArrowUp"; + let isMinus = event.key == "ArrowDown"; + if (!isPlus && !isMinus) return; + + let selectionStart = target.selectionStart; + let selectionEnd = target.selectionEnd; + 
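+    // Snapshot the selection; when nothing is selected, the helpers below widen
+    // it to the surrounding (...)/<...> block or to the word under the cursor
+    // before the weight is adjusted. For example, with the cursor on "cat" in
+    // "a photo of a cat", Ctrl+ArrowUp produces "a photo of a (cat:1.1)", and
+    // Ctrl+ArrowDown on "(cat:1.1)" returns the weight to 1 and strips the parens.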
+    let text = target.value;
+
+    function selectCurrentParenthesisBlock(OPEN, CLOSE) {
+        if (selectionStart !== selectionEnd) return false;
+
+        // Find opening parenthesis around current cursor
+        const before = text.substring(0, selectionStart);
+        let beforeParen = before.lastIndexOf(OPEN);
+        if (beforeParen == -1) return false;
+        let beforeParenClose = before.lastIndexOf(CLOSE);
+        while (beforeParenClose !== -1 && beforeParenClose > beforeParen) {
+            beforeParen = before.lastIndexOf(OPEN, beforeParen - 1);
+            beforeParenClose = before.lastIndexOf(CLOSE, beforeParenClose - 1);
+        }
+
+        // Find closing parenthesis around current cursor
+        const after = text.substring(selectionStart);
+        let afterParen = after.indexOf(CLOSE);
+        if (afterParen == -1) return false;
+        let afterParenOpen = after.indexOf(OPEN);
+        while (afterParenOpen !== -1 && afterParen > afterParenOpen) {
+            afterParen = after.indexOf(CLOSE, afterParen + 1);
+            afterParenOpen = after.indexOf(OPEN, afterParenOpen + 1);
+        }
+        if (beforeParen === -1 || afterParen === -1) return false;
+
+        // Set the selection to the text between the parentheses
+        const parenContent = text.substring(beforeParen + 1, selectionStart + afterParen);
+        const lastColon = parenContent.lastIndexOf(":");
+        selectionStart = beforeParen + 1;
+        selectionEnd = selectionStart + lastColon;
+        target.setSelectionRange(selectionStart, selectionEnd);
+        return true;
+    }
+
+    function selectCurrentWord() {
+        if (selectionStart !== selectionEnd) return false;
+        const delimiters = ".,\\/!?%^*;:{}=`~() \r\n\t";
+
+        // seek backward to find the beginning of the word
+        while (!delimiters.includes(text[selectionStart - 1]) && selectionStart > 0) {
+            selectionStart--;
+        }
+
+        // seek forward to find the end of the word
+        while (!delimiters.includes(text[selectionEnd]) && selectionEnd < text.length) {
+            selectionEnd++;
+        }
+
+        target.setSelectionRange(selectionStart, selectionEnd);
+        return true;
+    }
+
+    // If the user hasn't selected anything, select their current parenthesis block or word
+    if (!selectCurrentParenthesisBlock('<', '>') && !selectCurrentParenthesisBlock('(', ')')) {
+        selectCurrentWord();
+    }
+
+    event.preventDefault();
+
+    var closeCharacter = ')';
+    var delta = 0.1;
+
+    if (selectionStart > 0 && text[selectionStart - 1] == '<') {
+        closeCharacter = '>';
+        delta = 0.05;
+    } else if (selectionStart == 0 || text[selectionStart - 1] != "(") {
+
+        // do not include spaces at the end
+        while (selectionEnd > selectionStart && text[selectionEnd - 1] == ' ') {
+            selectionEnd -= 1;
+        }
+        if (selectionStart == selectionEnd) {
+            return;
+        }
+
+        text = text.slice(0, selectionStart) + "(" + text.slice(selectionStart, selectionEnd) + ":1.0)" + text.slice(selectionEnd);
+
+        selectionStart += 1;
+        selectionEnd += 1;
+    }
+
+    var end = text.slice(selectionEnd + 1).indexOf(closeCharacter) + 1;
+    var weight = parseFloat(text.slice(selectionEnd + 1, selectionEnd + 1 + end));
+    if (isNaN(weight)) return;
+
+    weight += isPlus ?
delta : -delta; + weight = parseFloat(weight.toPrecision(12)); + if (String(weight).length == 1) weight += ".0"; + + if (closeCharacter == ')' && weight == 1) { + var endParenPos = text.substring(selectionEnd).indexOf(')'); + text = text.slice(0, selectionStart - 1) + text.slice(selectionStart, selectionEnd) + text.slice(selectionEnd + endParenPos + 1); + selectionStart--; + selectionEnd--; + } else { + text = text.slice(0, selectionEnd + 1) + weight + text.slice(selectionEnd + end); + } + + target.focus(); + target.value = text; + target.selectionStart = selectionStart; + target.selectionEnd = selectionEnd; + + updateInput(target); + +} + +addEventListener('keydown', (event) => { + keyupEditAttention(event); +}); diff --git a/javascript/imageviewer.js b/javascript/imageviewer.js new file mode 100644 index 0000000000000000000000000000000000000000..29f0f312bc02eae80f8e4a0245a7949d43dfad8d --- /dev/null +++ b/javascript/imageviewer.js @@ -0,0 +1,260 @@ +// From A1111 + +function closeModal() { + gradioApp().getElementById("lightboxModal").style.display = "none"; +} + +function showModal(event) { + const source = event.target || event.srcElement; + const modalImage = gradioApp().getElementById("modalImage"); + const lb = gradioApp().getElementById("lightboxModal"); + modalImage.src = source.src; + if (modalImage.style.display === 'none') { + lb.style.setProperty('background-image', 'url(' + source.src + ')'); + } + lb.style.display = "flex"; + lb.focus(); + + event.stopPropagation(); +} + +function negmod(n, m) { + return ((n % m) + m) % m; +} + +function updateOnBackgroundChange() { + const modalImage = gradioApp().getElementById("modalImage"); + if (modalImage && modalImage.offsetParent) { + let currentButton = selected_gallery_button(); + + if (currentButton?.children?.length > 0 && modalImage.src != currentButton.children[0].src) { + modalImage.src = currentButton.children[0].src; + if (modalImage.style.display === 'none') { + const modal = gradioApp().getElementById("lightboxModal"); + modal.style.setProperty('background-image', `url(${modalImage.src})`); + } + } + } +} + +function all_gallery_buttons() { + var allGalleryButtons = gradioApp().querySelectorAll('.image_gallery .thumbnails > .thumbnail-item.thumbnail-small'); + var visibleGalleryButtons = []; + allGalleryButtons.forEach(function(elem) { + if (elem.parentElement.offsetParent) { + visibleGalleryButtons.push(elem); + } + }); + return visibleGalleryButtons; +} + +function selected_gallery_button() { + return all_gallery_buttons().find(elem => elem.classList.contains('selected')) ?? 
null;
+}
+
+function selected_gallery_index() {
+    return all_gallery_buttons().findIndex(elem => elem.classList.contains('selected'));
+}
+
+function modalImageSwitch(offset) {
+    var galleryButtons = all_gallery_buttons();
+
+    if (galleryButtons.length > 1) {
+        var currentButton = selected_gallery_button();
+
+        var result = -1;
+        galleryButtons.forEach(function(v, i) {
+            if (v == currentButton) {
+                result = i;
+            }
+        });
+
+        if (result != -1) {
+            var nextButton = galleryButtons[negmod((result + offset), galleryButtons.length)];
+            nextButton.click();
+            const modalImage = gradioApp().getElementById("modalImage");
+            const modal = gradioApp().getElementById("lightboxModal");
+            modalImage.src = nextButton.children[0].src;
+            if (modalImage.style.display === 'none') {
+                modal.style.setProperty('background-image', `url(${modalImage.src})`);
+            }
+            setTimeout(function() {
+                modal.focus();
+            }, 10);
+        }
+    }
+}
+
+function saveImage() {
+    // no-op placeholder: invoked by the "s" hotkey in modalKeyHandler
+}
+
+function modalSaveImage(event) {
+    event.stopPropagation();
+}
+
+function modalNextImage(event) {
+    modalImageSwitch(1);
+    event.stopPropagation();
+}
+
+function modalPrevImage(event) {
+    modalImageSwitch(-1);
+    event.stopPropagation();
+}
+
+function modalKeyHandler(event) {
+    switch (event.key) {
+        case "s":
+            saveImage();
+            break;
+        case "ArrowLeft":
+            modalPrevImage(event);
+            break;
+        case "ArrowRight":
+            modalNextImage(event);
+            break;
+        case "Escape":
+            closeModal();
+            break;
+    }
+}
+
+function setupImageForLightbox(e) {
+    if (e.dataset.modded) {
+        return;
+    }
+
+    e.dataset.modded = true;
+    e.style.cursor = 'pointer';
+    e.style.userSelect = 'none';
+
+    var isFirefox = navigator.userAgent.toLowerCase().indexOf('firefox') > -1;
+
+    // For Firefox, listening on click first switches to the next image and only
+    // then shows the lightbox. If you know how to fix this without switching to
+    // the mousedown event, please do. For other browsers the event is click,
+    // which makes it possible to drag the picture.
+    var event = isFirefox ?
'mousedown' : 'click'; + + e.addEventListener(event, function(evt) { + if (evt.button == 1) { + open(evt.target.src); + evt.preventDefault(); + return; + } + if (evt.button != 0) return; + + modalZoomSet(gradioApp().getElementById('modalImage'), true); + evt.preventDefault(); + showModal(evt); + }, true); + +} + +function modalZoomSet(modalImage, enable) { + if (modalImage) modalImage.classList.toggle('modalImageFullscreen', !!enable); +} + +function modalZoomToggle(event) { + var modalImage = gradioApp().getElementById("modalImage"); + modalZoomSet(modalImage, !modalImage.classList.contains('modalImageFullscreen')); + event.stopPropagation(); +} + +function modalTileImageToggle(event) { + const modalImage = gradioApp().getElementById("modalImage"); + const modal = gradioApp().getElementById("lightboxModal"); + const isTiling = modalImage.style.display === 'none'; + if (isTiling) { + modalImage.style.display = 'block'; + modal.style.setProperty('background-image', 'none'); + } else { + modalImage.style.display = 'none'; + modal.style.setProperty('background-image', `url(${modalImage.src})`); + } + + event.stopPropagation(); +} + +onAfterUiUpdate(function() { + var fullImg_preview = gradioApp().querySelectorAll('.image_gallery > div > img'); + if (fullImg_preview != null) { + fullImg_preview.forEach(setupImageForLightbox); + } + updateOnBackgroundChange(); +}); + +document.addEventListener("DOMContentLoaded", function() { + //const modalFragment = document.createDocumentFragment(); + const modal = document.createElement('div'); + modal.onclick = closeModal; + modal.id = "lightboxModal"; + modal.tabIndex = 0; + modal.addEventListener('keydown', modalKeyHandler, true); + + const modalControls = document.createElement('div'); + modalControls.className = 'modalControls gradio-container'; + modal.append(modalControls); + + const modalZoom = document.createElement('span'); + modalZoom.className = 'modalZoom cursor'; + modalZoom.innerHTML = '⤡'; + modalZoom.addEventListener('click', modalZoomToggle, true); + modalZoom.title = "Toggle zoomed view"; + modalControls.appendChild(modalZoom); + + // const modalTileImage = document.createElement('span'); + // modalTileImage.className = 'modalTileImage cursor'; + // modalTileImage.innerHTML = '⊞'; + // modalTileImage.addEventListener('click', modalTileImageToggle, true); + // modalTileImage.title = "Preview tiling"; + // modalControls.appendChild(modalTileImage); + // + // const modalSave = document.createElement("span"); + // modalSave.className = "modalSave cursor"; + // modalSave.id = "modal_save"; + // modalSave.innerHTML = "🖫"; + // modalSave.addEventListener("click", modalSaveImage, true); + // modalSave.title = "Save Image(s)"; + // modalControls.appendChild(modalSave); + + const modalClose = document.createElement('span'); + modalClose.className = 'modalClose cursor'; + modalClose.innerHTML = '×'; + modalClose.onclick = closeModal; + modalClose.title = "Close image viewer"; + modalControls.appendChild(modalClose); + + const modalImage = document.createElement('img'); + modalImage.id = 'modalImage'; + modalImage.onclick = closeModal; + modalImage.tabIndex = 0; + modalImage.addEventListener('keydown', modalKeyHandler, true); + modal.appendChild(modalImage); + + const modalPrev = document.createElement('a'); + modalPrev.className = 'modalPrev'; + modalPrev.innerHTML = '❮'; + modalPrev.tabIndex = 0; + modalPrev.addEventListener('click', modalPrevImage, true); + modalPrev.addEventListener('keydown', modalKeyHandler, true); + 
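+    // the prev/next arrows are focusable, so the shared key handler (arrow
+    // keys, "s", Escape) works whether the image or a control has focus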
modal.appendChild(modalPrev); + + const modalNext = document.createElement('a'); + modalNext.className = 'modalNext'; + modalNext.innerHTML = '❯'; + modalNext.tabIndex = 0; + modalNext.addEventListener('click', modalNextImage, true); + modalNext.addEventListener('keydown', modalKeyHandler, true); + + modal.appendChild(modalNext); + + try { + gradioApp().appendChild(modal); + } catch (e) { + gradioApp().body.appendChild(modal); + } + + document.body.appendChild(modal); + +}); diff --git a/javascript/localization.js b/javascript/localization.js new file mode 100644 index 0000000000000000000000000000000000000000..0a8394ca2e7e82d5c8c5708655fffb811b43f430 --- /dev/null +++ b/javascript/localization.js @@ -0,0 +1,144 @@ +var re_num = /^[.\d]+$/; + +var original_lines = {}; +var translated_lines = {}; + +function hasLocalization() { + return window.localization && Object.keys(window.localization).length > 0; +} + +function textNodesUnder(el) { + var n, a = [], walk = document.createTreeWalker(el, NodeFilter.SHOW_TEXT, null, false); + while ((n = walk.nextNode())) a.push(n); + return a; +} + +function canBeTranslated(node, text) { + if (!text) return false; + if (!node.parentElement) return false; + var parentType = node.parentElement.nodeName; + if (parentType == 'SCRIPT' || parentType == 'STYLE' || parentType == 'TEXTAREA') return false; + if (re_num.test(text)) return false; + return true; +} + +function getTranslation(text) { + if (!text) return undefined; + + if (translated_lines[text] === undefined) { + original_lines[text] = 1; + } + + var tl = localization[text]; + if (tl !== undefined) { + translated_lines[tl] = 1; + } + + return tl; +} + +function processTextNode(node) { + var text = node.textContent.trim(); + + if (!canBeTranslated(node, text)) return; + + var tl = getTranslation(text); + if (tl !== undefined) { + node.textContent = tl; + if (text && node.parentElement) { + node.parentElement.setAttribute("data-original-text", text); + } + } +} + +function processNode(node) { + if (node.nodeType == 3) { + processTextNode(node); + return; + } + + if (node.title) { + let tl = getTranslation(node.title); + if (tl !== undefined) { + node.title = tl; + } + } + + if (node.placeholder) { + let tl = getTranslation(node.placeholder); + if (tl !== undefined) { + node.placeholder = tl; + } + } + + textNodesUnder(node).forEach(function(node) { + processTextNode(node); + }); +} + +function refresh_style_localization() { + processNode(document.querySelector('.style_selections')); +} + +function localizeWholePage() { + processNode(gradioApp()); + + function elem(comp) { + var elem_id = comp.props.elem_id ? comp.props.elem_id : "component-" + comp.id; + return gradioApp().getElementById(elem_id); + } + + for (var comp of window.gradio_config.components) { + if (comp.props.webui_tooltip) { + let e = elem(comp); + + let tl = e ? getTranslation(e.title) : undefined; + if (tl !== undefined) { + e.title = tl; + } + } + if (comp.props.placeholder) { + let e = elem(comp); + let textbox = e ? e.querySelector('[placeholder]') : null; + + let tl = textbox ? 
getTranslation(textbox.placeholder) : undefined; + if (tl !== undefined) { + textbox.placeholder = tl; + } + } + } +} + +document.addEventListener("DOMContentLoaded", function() { + if (!hasLocalization()) { + return; + } + + onUiUpdate(function(m) { + m.forEach(function(mutation) { + mutation.addedNodes.forEach(function(node) { + processNode(node); + }); + }); + }); + + localizeWholePage(); + + if (localization.rtl) { // if the language is from right to left, + (new MutationObserver((mutations, observer) => { // wait for the style to load + mutations.forEach(mutation => { + mutation.addedNodes.forEach(node => { + if (node.tagName === 'STYLE') { + observer.disconnect(); + + for (const x of node.sheet.rules) { // find all rtl media rules + if (Array.from(x.media || []).includes('rtl')) { + x.media.appendMedium('all'); // enable them + } + } + } + }); + }); + })).observe(gradioApp(), {childList: true}); + } +}); diff --git a/javascript/script.js b/javascript/script.js new file mode 100644 index 0000000000000000000000000000000000000000..8f4cac58f52686a6d850e4d3744470435d3a3929 --- /dev/null +++ b/javascript/script.js @@ -0,0 +1,215 @@ +// based on https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/v1.6.0/script.js +function gradioApp() { + const elems = document.getElementsByTagName('gradio-app'); + const elem = elems.length == 0 ? document : elems[0]; + + if (elem !== document) { + elem.getElementById = function(id) { + return document.getElementById(id); + }; + } + return elem.shadowRoot ? elem.shadowRoot : elem; +} + +/** + * Get the currently selected top-level UI tab button (e.g. the button that says "Extras"). + */ +function get_uiCurrentTab() { + return gradioApp().querySelector('#tabs > .tab-nav > button.selected'); +} + +/** + * Get the first currently visible top-level UI tab content (e.g. the div hosting the "txt2img" UI). + */ +function get_uiCurrentTabContent() { + return gradioApp().querySelector('#tabs > .tabitem[id^=tab_]:not([style*="display: none"])'); +} + +var uiUpdateCallbacks = []; +var uiAfterUpdateCallbacks = []; +var uiLoadedCallbacks = []; +var uiTabChangeCallbacks = []; +var optionsChangedCallbacks = []; +var uiAfterUpdateTimeout = null; +var uiCurrentTab = null; + +/** + * Register callback to be called at each UI update. + * The callback receives an array of MutationRecords as an argument. + */ +function onUiUpdate(callback) { + uiUpdateCallbacks.push(callback); +} + +/** + * Register callback to be called soon after UI updates. + * The callback receives no arguments. + * + * This is preferred over `onUiUpdate` if you don't need + * access to the MutationRecords, as your function will + * not be called quite as often. + */ +function onAfterUiUpdate(callback) { + uiAfterUpdateCallbacks.push(callback); +} + +/** + * Register callback to be called when the UI is loaded. + * The callback receives no arguments. + */ +function onUiLoaded(callback) { + uiLoadedCallbacks.push(callback); +} + +/** + * Register callback to be called when the UI tab is changed. + * The callback receives no arguments. + */ +function onUiTabChange(callback) { + uiTabChangeCallbacks.push(callback); +} + +/** + * Register callback to be called when the options are changed. + * The callback receives no arguments. 
+ * @param callback + */ +function onOptionsChanged(callback) { + optionsChangedCallbacks.push(callback); +} + +function executeCallbacks(queue, arg) { + for (const callback of queue) { + try { + callback(arg); + } catch (e) { + console.error("error running callback", callback, ":", e); + } + } +} + +/** + * Schedule the execution of the callbacks registered with onAfterUiUpdate. + * The callbacks are executed after a short while, unless another call to this function + * is made before that time. IOW, the callbacks are executed only once, even + * when there are multiple mutations observed. + */ +function scheduleAfterUiUpdateCallbacks() { + clearTimeout(uiAfterUpdateTimeout); + uiAfterUpdateTimeout = setTimeout(function() { + executeCallbacks(uiAfterUpdateCallbacks); + }, 200); +} + +var executedOnLoaded = false; + +document.addEventListener("DOMContentLoaded", function() { + var mutationObserver = new MutationObserver(function(m) { + if (!executedOnLoaded && gradioApp().querySelector('#generate_button')) { + executedOnLoaded = true; + executeCallbacks(uiLoadedCallbacks); + } + + executeCallbacks(uiUpdateCallbacks, m); + scheduleAfterUiUpdateCallbacks(); + const newTab = get_uiCurrentTab(); + if (newTab && (newTab !== uiCurrentTab)) { + uiCurrentTab = newTab; + executeCallbacks(uiTabChangeCallbacks); + } + }); + mutationObserver.observe(gradioApp(), {childList: true, subtree: true}); + initStylePreviewOverlay(); +}); + +/** + * Add a ctrl+enter as a shortcut to start a generation + */ +document.addEventListener('keydown', function(e) { + const isModifierKey = (e.metaKey || e.ctrlKey || e.altKey); + const isEnterKey = (e.key == "Enter" || e.keyCode == 13); + + if(isModifierKey && isEnterKey) { + const generateButton = gradioApp().querySelector('button:not(.hidden)[id=generate_button]'); + if (generateButton) { + generateButton.click(); + e.preventDefault(); + return; + } + + const stopButton = gradioApp().querySelector('button:not(.hidden)[id=stop_button]') + if(stopButton) { + stopButton.click(); + e.preventDefault(); + return; + } + } +}); + +function initStylePreviewOverlay() { + let overlayVisible = false; + const samplesPath = document.querySelector("meta[name='samples-path']").getAttribute("content") + const overlay = document.createElement('div'); + overlay.id = 'stylePreviewOverlay'; + document.body.appendChild(overlay); + document.addEventListener('mouseover', function(e) { + const label = e.target.closest('.style_selections label'); + if (!label) return; + label.removeEventListener("mouseout", onMouseLeave); + label.addEventListener("mouseout", onMouseLeave); + overlayVisible = true; + overlay.style.opacity = "1"; + const originalText = label.querySelector("span").getAttribute("data-original-text"); + const name = originalText || label.querySelector("span").textContent; + overlay.style.backgroundImage = `url("${samplesPath.replace( + "fooocus_v2", + name.toLowerCase().replaceAll(" ", "_") + ).replaceAll("\\", "\\\\")}")`; + function onMouseLeave() { + overlayVisible = false; + overlay.style.opacity = "0"; + overlay.style.backgroundImage = ""; + label.removeEventListener("mouseout", onMouseLeave); + } + }); + document.addEventListener('mousemove', function(e) { + if(!overlayVisible) return; + overlay.style.left = `${e.clientX}px`; + overlay.style.top = `${e.clientY}px`; + overlay.className = e.clientY > window.innerHeight / 2 ? 
"lower-half" : "upper-half"; + }); +} + +/** + * checks that a UI element is not in another hidden element or tab content + */ +function uiElementIsVisible(el) { + if (el === document) { + return true; + } + + const computedStyle = getComputedStyle(el); + const isVisible = computedStyle.display !== 'none'; + + if (!isVisible) return false; + return uiElementIsVisible(el.parentNode); +} + +function uiElementInSight(el) { + const clRect = el.getBoundingClientRect(); + const windowHeight = window.innerHeight; + const isOnScreen = clRect.bottom > 0 && clRect.top < windowHeight; + + return isOnScreen; +} + +function playNotification() { + gradioApp().querySelector('#audio_notification audio')?.play(); +} + +function set_theme(theme) { + var gradioURL = window.location.href; + if (!gradioURL.includes('?__theme=')) { + window.location.replace(gradioURL + '?__theme=' + theme); + } +} diff --git a/javascript/viewer.js b/javascript/viewer.js new file mode 100644 index 0000000000000000000000000000000000000000..3df32ccf4e78fe07f0d71f6877015f51e6154f2f --- /dev/null +++ b/javascript/viewer.js @@ -0,0 +1,88 @@ +window.main_viewer_height = 512; + +function refresh_grid() { + let gridContainer = document.querySelector('#final_gallery .grid-container'); + let final_gallery = document.getElementById('final_gallery'); + + if (gridContainer) if (final_gallery) { + let rect = final_gallery.getBoundingClientRect(); + let cols = Math.ceil((rect.width - 16.0) / rect.height); + if (cols < 2) cols = 2; + gridContainer.style.setProperty('--grid-cols', cols); + } +} + +function refresh_grid_delayed() { + refresh_grid(); + setTimeout(refresh_grid, 100); + setTimeout(refresh_grid, 500); + setTimeout(refresh_grid, 1000); +} + +function resized() { + let windowHeight = window.innerHeight - 260; + let elements = document.getElementsByClassName('main_view'); + + if (windowHeight > 745) windowHeight = 745; + + for (let i = 0; i < elements.length; i++) { + elements[i].style.height = windowHeight + 'px'; + } + + window.main_viewer_height = windowHeight; + + refresh_grid(); +} + +function viewer_to_top(delay = 100) { + setTimeout(() => window.scrollTo({top: 0, behavior: 'smooth'}), delay); +} + +function viewer_to_bottom(delay = 100) { + let element = document.getElementById('positive_prompt'); + let yPos = window.main_viewer_height; + + if (element) { + yPos = element.getBoundingClientRect().top + window.scrollY; + } + + setTimeout(() => window.scrollTo({top: yPos - 8, behavior: 'smooth'}), delay); +} + +window.addEventListener('resize', (e) => { + resized(); +}); + +onUiLoaded(async () => { + resized(); +}); + +function on_style_selection_blur() { + let target = document.querySelector("#gradio_receiver_style_selections textarea"); + target.value = "on_style_selection_blur " + Math.random(); + let e = new Event("input", {bubbles: true}) + Object.defineProperty(e, "target", {value: target}) + target.dispatchEvent(e); +} + +onUiLoaded(async () => { + let spans = document.querySelectorAll('.aspect_ratios span'); + + spans.forEach(function (span) { + span.innerHTML = span.innerHTML.replace(/</g, '<').replace(/>/g, '>'); + }); + + document.querySelector('.style_selections').addEventListener('focusout', function (event) { + setTimeout(() => { + if (!this.contains(document.activeElement)) { + on_style_selection_blur(); + } + }, 200); + }); + + let inputs = document.querySelectorAll('.lora_weight input[type="range"]'); + + inputs.forEach(function (input) { + input.style.marginTop = '12px'; + }); +}); diff --git a/javascript/zoom.js 
b/javascript/zoom.js new file mode 100644 index 0000000000000000000000000000000000000000..450a034729cbdb598ff2bbf84a7b76572f13c573 --- /dev/null +++ b/javascript/zoom.js @@ -0,0 +1,645 @@ +onUiLoaded(async() => { + // Helper functions + + // Detect whether the element has a horizontal scroll bar + function hasHorizontalScrollbar(element) { + return element.scrollWidth > element.clientWidth; + } + + // Function for defining the "Ctrl", "Shift" and "Alt" keys + function isModifierKey(event, key) { + switch (key) { + case "Ctrl": + return event.ctrlKey; + case "Shift": + return event.shiftKey; + case "Alt": + return event.altKey; + default: + return false; + } + } + + // Create hotkey configuration with the provided options + function createHotkeyConfig(defaultHotkeysConfig) { + const result = {}; // Resulting hotkey configuration + for (const key in defaultHotkeysConfig) { + result[key] = defaultHotkeysConfig[key]; + } + return result; + } + + // Default config + const defaultHotkeysConfig = { + canvas_hotkey_zoom: "Shift", + canvas_hotkey_adjust: "Ctrl", + canvas_zoom_undo_extra_key: "Ctrl", + canvas_zoom_hotkey_undo: "KeyZ", + canvas_hotkey_reset: "KeyR", + canvas_hotkey_fullscreen: "KeyS", + canvas_hotkey_move: "KeyF", + canvas_show_tooltip: true, + canvas_auto_expand: true, + canvas_blur_prompt: true, + }; + + // Loading the configuration from opts + const hotkeysConfig = createHotkeyConfig( + defaultHotkeysConfig + ); + + let isMoving = false; + let activeElement; + + const elemData = {}; + + function applyZoomAndPan(elemId) { + const targetElement = gradioApp().querySelector(elemId); + + if (!targetElement) { + console.log("Element not found"); + return; + } + + targetElement.style.transformOrigin = "0 0"; + + elemData[elemId] = { + zoom: 1, + panX: 0, + panY: 0 + }; + + let fullScreenMode = false; + + // Create tooltip + function createTooltip() { + const toolTipElemnt = + targetElement.querySelector(".image-container"); + const tooltip = document.createElement("div"); + tooltip.className = "canvas-tooltip"; + + // Creating an item of information + const info = document.createElement("i"); + info.className = "canvas-tooltip-info"; + info.textContent = ""; + + // Create a container for the contents of the tooltip + const tooltipContent = document.createElement("div"); + tooltipContent.className = "canvas-tooltip-content"; + + // Define an array with hotkey information and their actions + const hotkeysInfo = [ + { + configKey: "canvas_hotkey_zoom", + action: "Zoom canvas", + keySuffix: " + wheel" + }, + { + configKey: "canvas_hotkey_adjust", + action: "Adjust brush size", + keySuffix: " + wheel" + }, + {configKey: "canvas_zoom_hotkey_undo", action: "Undo last action", keyPrefix: `${hotkeysConfig.canvas_zoom_undo_extra_key} + ` }, + {configKey: "canvas_hotkey_reset", action: "Reset zoom"}, + { + configKey: "canvas_hotkey_fullscreen", + action: "Fullscreen mode" + }, + {configKey: "canvas_hotkey_move", action: "Move canvas"} + ]; + + // Create hotkeys array based on the config values + const hotkeys = hotkeysInfo.map((info) => { + const configValue = hotkeysConfig[info.configKey]; + + let key = configValue.slice(-1); + + if (info.keySuffix) { + key = `${configValue}${info.keySuffix}`; + } + + if (info.keyPrefix && info.keyPrefix !== "None + ") { + key = `${info.keyPrefix}${configValue[3]}`; + } + + return { + key, + action: info.action, + }; + }); + + hotkeys + .forEach(hotkey => { + const p = document.createElement("p"); + p.innerHTML = `${hotkey.key} - ${hotkey.action}`; + 
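+                // one "<key> - <action>" row per configured hotkey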
tooltipContent.appendChild(p); + }); + + tooltip.append(info, tooltipContent); + + // Add a hint element to the target element + toolTipElemnt.appendChild(tooltip); + } + + //Show tool tip if setting enable + if (hotkeysConfig.canvas_show_tooltip) { + createTooltip(); + } + + // Reset the zoom level and pan position of the target element to their initial values + function resetZoom() { + elemData[elemId] = { + zoomLevel: 1, + panX: 0, + panY: 0 + }; + + targetElement.style.overflow = "hidden"; + + targetElement.isZoomed = false; + + targetElement.style.transform = `scale(${elemData[elemId].zoomLevel}) translate(${elemData[elemId].panX}px, ${elemData[elemId].panY}px)`; + + const canvas = gradioApp().querySelector( + `${elemId} canvas[key="interface"]` + ); + + toggleOverlap("off"); + fullScreenMode = false; + + const closeBtn = targetElement.querySelector("button[aria-label='Remove Image']"); + if (closeBtn) { + closeBtn.addEventListener("click", resetZoom); + } + + if (canvas) { + const parentElement = targetElement.closest('[id^="component-"]'); + if ( + canvas && + parseFloat(canvas.style.width) > parentElement.offsetWidth && + parseFloat(targetElement.style.width) > parentElement.offsetWidth + ) { + fitToElement(); + return; + } + + } + + targetElement.style.width = ""; + } + + // Toggle the zIndex of the target element between two values, allowing it to overlap or be overlapped by other elements + function toggleOverlap(forced = "") { + const zIndex1 = "0"; + const zIndex2 = "998"; + + targetElement.style.zIndex = + targetElement.style.zIndex !== zIndex2 ? zIndex2 : zIndex1; + + if (forced === "off") { + targetElement.style.zIndex = zIndex1; + } else if (forced === "on") { + targetElement.style.zIndex = zIndex2; + } + } + + // Adjust the brush size based on the deltaY value from a mouse wheel event + function adjustBrushSize( + elemId, + deltaY, + withoutValue = false, + percentage = 5 + ) { + const input = + gradioApp().querySelector( + `${elemId} input[aria-label='Brush radius']` + ) || + gradioApp().querySelector( + `${elemId} button[aria-label="Use brush"]` + ); + + if (input) { + input.click(); + if (!withoutValue) { + const maxValue = + parseFloat(input.getAttribute("max")) || 100; + const changeAmount = maxValue * (percentage / 100); + const newValue = + parseFloat(input.value) + + (deltaY > 0 ? 
-changeAmount : changeAmount); + input.value = Math.min(Math.max(newValue, 0), maxValue); + input.dispatchEvent(new Event("change")); + } + } + } + + // Reset zoom when uploading a new image + const fileInput = gradioApp().querySelector( + `${elemId} input[type="file"][accept="image/*"].svelte-116rqfv` + ); + fileInput.addEventListener("click", resetZoom); + + // Update the zoom level and pan position of the target element based on the values of the zoomLevel, panX and panY variables + function updateZoom(newZoomLevel, mouseX, mouseY) { + newZoomLevel = Math.max(0.1, Math.min(newZoomLevel, 15)); + + elemData[elemId].panX += + mouseX - (mouseX * newZoomLevel) / elemData[elemId].zoomLevel; + elemData[elemId].panY += + mouseY - (mouseY * newZoomLevel) / elemData[elemId].zoomLevel; + + targetElement.style.transformOrigin = "0 0"; + targetElement.style.transform = `translate(${elemData[elemId].panX}px, ${elemData[elemId].panY}px) scale(${newZoomLevel})`; + targetElement.style.overflow = "visible"; + + toggleOverlap("on"); + + return newZoomLevel; + } + + // Change the zoom level based on user interaction + function changeZoomLevel(operation, e) { + if (isModifierKey(e, hotkeysConfig.canvas_hotkey_zoom)) { + e.preventDefault(); + + let zoomPosX, zoomPosY; + let delta = 0.2; + + if (elemData[elemId].zoomLevel > 7) { + delta = 0.9; + } else if (elemData[elemId].zoomLevel > 2) { + delta = 0.6; + } + + zoomPosX = e.clientX; + zoomPosY = e.clientY; + + fullScreenMode = false; + elemData[elemId].zoomLevel = updateZoom( + elemData[elemId].zoomLevel + + (operation === "+" ? delta : -delta), + zoomPosX - targetElement.getBoundingClientRect().left, + zoomPosY - targetElement.getBoundingClientRect().top + ); + + targetElement.isZoomed = true; + } + } + + /** + * This function fits the target element to the screen by calculating + * the required scale and offsets. It also updates the global variables + * zoomLevel, panX, and panY to reflect the new state. 
+ */ + + function fitToElement() { + //Reset Zoom + targetElement.style.transform = `translate(${0}px, ${0}px) scale(${1})`; + + let parentElement; + + parentElement = targetElement.closest('[id^="component-"]'); + + // Get element and screen dimensions + const elementWidth = targetElement.offsetWidth; + const elementHeight = targetElement.offsetHeight; + + const screenWidth = parentElement.clientWidth - 24; + const screenHeight = parentElement.clientHeight; + + // Calculate scale and offsets + const scaleX = screenWidth / elementWidth; + const scaleY = screenHeight / elementHeight; + const scale = Math.min(scaleX, scaleY); + + const offsetX =0; + const offsetY =0; + + // Apply scale and offsets to the element + targetElement.style.transform = `translate(${offsetX}px, ${offsetY}px) scale(${scale})`; + + // Update global variables + elemData[elemId].zoomLevel = scale; + elemData[elemId].panX = offsetX; + elemData[elemId].panY = offsetY; + + fullScreenMode = false; + toggleOverlap("off"); + } + + // Undo last action + function undoLastAction(e) { + let isCtrlPressed = isModifierKey(e, hotkeysConfig.canvas_zoom_undo_extra_key) + const isAuxButton = e.button >= 3; + + if (isAuxButton) { + isCtrlPressed = true + } else { + if (!isModifierKey(e, hotkeysConfig.canvas_zoom_undo_extra_key)) return; + } + + // Move undoBtn query outside the if statement to avoid unnecessary queries + const undoBtn = document.querySelector(`${activeElement} button[aria-label="Undo"]`); + + if ((isCtrlPressed) && undoBtn ) { + e.preventDefault(); + undoBtn.click(); + } + } + + /** + * This function fits the target element to the screen by calculating + * the required scale and offsets. It also updates the global variables + * zoomLevel, panX, and panY to reflect the new state. 
+ */ + + // Fullscreen mode + function fitToScreen() { + const canvas = gradioApp().querySelector( + `${elemId} canvas[key="interface"]` + ); + + if (!canvas) return; + + targetElement.style.width = (canvas.offsetWidth + 2) + "px"; + targetElement.style.overflow = "visible"; + + if (fullScreenMode) { + resetZoom(); + fullScreenMode = false; + return; + } + + //Reset Zoom + targetElement.style.transform = `translate(${0}px, ${0}px) scale(${1})`; + + // Get scrollbar width to right-align the image + const scrollbarWidth = + window.innerWidth - document.documentElement.clientWidth; + + // Get element and screen dimensions + const elementWidth = targetElement.offsetWidth; + const elementHeight = targetElement.offsetHeight; + const screenWidth = window.innerWidth - scrollbarWidth; + const screenHeight = window.innerHeight; + + // Get element's coordinates relative to the page + const elementRect = targetElement.getBoundingClientRect(); + const elementY = elementRect.y; + const elementX = elementRect.x; + + // Calculate scale and offsets + const scaleX = screenWidth / elementWidth; + const scaleY = screenHeight / elementHeight; + const scale = Math.min(scaleX, scaleY); + + // Get the current transformOrigin + const computedStyle = window.getComputedStyle(targetElement); + const transformOrigin = computedStyle.transformOrigin; + const [originX, originY] = transformOrigin.split(" "); + const originXValue = parseFloat(originX); + const originYValue = parseFloat(originY); + + // Calculate offsets with respect to the transformOrigin + const offsetX = + (screenWidth - elementWidth * scale) / 2 - + elementX - + originXValue * (1 - scale); + const offsetY = + (screenHeight - elementHeight * scale) / 2 - + elementY - + originYValue * (1 - scale); + + // Apply scale and offsets to the element + targetElement.style.transform = `translate(${offsetX}px, ${offsetY}px) scale(${scale})`; + + // Update global variables + elemData[elemId].zoomLevel = scale; + elemData[elemId].panX = offsetX; + elemData[elemId].panY = offsetY; + + fullScreenMode = true; + toggleOverlap("on"); + } + + // Handle keydown events + function handleKeyDown(event) { + // Disable key locks to make pasting from the buffer work correctly + if ((event.ctrlKey && event.code === 'KeyV') || (event.ctrlKey && event.code === 'KeyC') || event.code === "F5") { + return; + } + + // before activating shortcut, ensure user is not actively typing in an input field + if (!hotkeysConfig.canvas_blur_prompt) { + if (event.target.nodeName === 'TEXTAREA' || event.target.nodeName === 'INPUT') { + return; + } + } + + const hotkeyActions = { + [hotkeysConfig.canvas_hotkey_reset]: resetZoom, + [hotkeysConfig.canvas_hotkey_overlap]: toggleOverlap, + [hotkeysConfig.canvas_hotkey_fullscreen]: fitToScreen, + [hotkeysConfig.canvas_zoom_hotkey_undo]: undoLastAction, + }; + + const action = hotkeyActions[event.code]; + if (action) { + event.preventDefault(); + action(event); + } + + if ( + isModifierKey(event, hotkeysConfig.canvas_hotkey_zoom) || + isModifierKey(event, hotkeysConfig.canvas_hotkey_adjust) + ) { + event.preventDefault(); + } + } + + // Get Mouse position + function getMousePosition(e) { + mouseX = e.offsetX; + mouseY = e.offsetY; + } + + // Simulation of the function to put a long image into the screen. + // We detect if an image has a scroll bar or not, make a fullscreen to reveal the image, then reduce it to fit into the element. + // We hide the image and show it to the user when it is ready. 
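+    // Rough sequence, as sketched above: hide the element, call fitToScreen() to
+    // reveal the whole canvas, then resetZoom() to fit it back into the component,
+    // and finally show it again; the 10 ms timeout lets layout settle in between.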
+ + targetElement.isExpanded = false; + function autoExpand() { + const canvas = document.querySelector(`${elemId} canvas[key="interface"]`); + if (canvas) { + if (hasHorizontalScrollbar(targetElement) && targetElement.isExpanded === false) { + targetElement.style.visibility = "hidden"; + setTimeout(() => { + fitToScreen(); + resetZoom(); + targetElement.style.visibility = "visible"; + targetElement.isExpanded = true; + }, 10); + } + } + } + + targetElement.addEventListener("mousemove", getMousePosition); + targetElement.addEventListener("auxclick", undoLastAction); + + //observers + // Creating an observer with a callback function to handle DOM changes + const observer = new MutationObserver((mutationsList, observer) => { + for (let mutation of mutationsList) { + // If the style attribute of the canvas has changed, by observation it happens only when the picture changes + if (mutation.type === 'attributes' && mutation.attributeName === 'style' && + mutation.target.tagName.toLowerCase() === 'canvas') { + targetElement.isExpanded = false; + setTimeout(resetZoom, 10); + } + } + }); + + // Apply auto expand if enabled + if (hotkeysConfig.canvas_auto_expand) { + targetElement.addEventListener("mousemove", autoExpand); + // Set up an observer to track attribute changes + observer.observe(targetElement, { attributes: true, childList: true, subtree: true }); + } + + // Handle events only inside the targetElement + let isKeyDownHandlerAttached = false; + + function handleMouseMove() { + if (!isKeyDownHandlerAttached) { + document.addEventListener("keydown", handleKeyDown); + isKeyDownHandlerAttached = true; + + activeElement = elemId; + } + } + + function handleMouseLeave() { + if (isKeyDownHandlerAttached) { + document.removeEventListener("keydown", handleKeyDown); + isKeyDownHandlerAttached = false; + + activeElement = null; + } + } + + // Add mouse event handlers + targetElement.addEventListener("mousemove", handleMouseMove); + targetElement.addEventListener("mouseleave", handleMouseLeave); + + targetElement.addEventListener("wheel", e => { + // change zoom level + const operation = e.deltaY > 0 ? "-" : "+"; + changeZoomLevel(operation, e); + + // Handle brush size adjustment with ctrl key pressed + if (isModifierKey(e, hotkeysConfig.canvas_hotkey_adjust)) { + e.preventDefault(); + + // Increase or decrease brush size based on scroll direction + adjustBrushSize(elemId, e.deltaY); + } + }); + + // Handle the move event for pan functionality. Updates the panX and panY variables and applies the new transform to the target element. + function handleMoveKeyDown(e) { + + // Disable key locks to make pasting from the buffer work correctly + if ((e.ctrlKey && e.code === 'KeyV') || (e.ctrlKey && e.code === 'KeyC') || e.code === "F5") { + return; + } + + // before activating shortcut, ensure user is not actively typing in an input field + if (!hotkeysConfig.canvas_blur_prompt) { + if (e.target.nodeName === 'TEXTAREA' || e.target.nodeName === 'INPUT') { + return; + } + } + + + if (e.code === hotkeysConfig.canvas_hotkey_move) { + if (!e.ctrlKey && !e.metaKey && isKeyDownHandlerAttached) { + e.preventDefault(); + document.activeElement.blur(); + isMoving = true; + } + } + } + + function handleMoveKeyUp(e) { + if (e.code === hotkeysConfig.canvas_hotkey_move) { + isMoving = false; + } + } + + document.addEventListener("keydown", handleMoveKeyDown); + document.addEventListener("keyup", handleMoveKeyUp); + + // Detect zoom level and update the pan speed. 
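+        // pan speed scales with zoom so a deeply zoomed canvas still moves
+        // quickly: 2 px per pixel of mouse movement normally, 3.5 px above zoom 8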
+ function updatePanPosition(movementX, movementY) { + let panSpeed = 2; + + if (elemData[elemId].zoomLevel > 8) { + panSpeed = 3.5; + } + + elemData[elemId].panX += movementX * panSpeed; + elemData[elemId].panY += movementY * panSpeed; + + // Delayed redraw of an element + requestAnimationFrame(() => { + targetElement.style.transform = `translate(${elemData[elemId].panX}px, ${elemData[elemId].panY}px) scale(${elemData[elemId].zoomLevel})`; + toggleOverlap("on"); + }); + } + + function handleMoveByKey(e) { + if (isMoving && elemId === activeElement) { + updatePanPosition(e.movementX, e.movementY); + targetElement.style.pointerEvents = "none"; + targetElement.style.overflow = "visible"; + } else { + targetElement.style.pointerEvents = "auto"; + } + } + + // Prevents sticking to the mouse + window.onblur = function() { + isMoving = false; + }; + + // Checks for extension + function checkForOutBox() { + const parentElement = targetElement.closest('[id^="component-"]'); + if (parentElement.offsetWidth < targetElement.offsetWidth && !targetElement.isExpanded) { + resetZoom(); + targetElement.isExpanded = true; + } + + if (parentElement.offsetWidth < targetElement.offsetWidth && elemData[elemId].zoomLevel == 1) { + resetZoom(); + } + + if (parentElement.offsetWidth < targetElement.offsetWidth && targetElement.offsetWidth * elemData[elemId].zoomLevel > parentElement.offsetWidth && elemData[elemId].zoomLevel < 1 && !targetElement.isZoomed) { + resetZoom(); + } + } + + targetElement.addEventListener("mousemove", checkForOutBox); + + window.addEventListener('resize', (e) => { + resetZoom(); + + targetElement.isExpanded = false; + targetElement.isZoomed = false; + }); + + gradioApp().addEventListener("mousemove", handleMoveByKey); + } + + applyZoomAndPan("#inpaint_canvas"); +}); diff --git a/language/en.json b/language/en.json new file mode 100644 index 0000000000000000000000000000000000000000..f61255c963d3f2be4c2f23df45a52b56ef18bf88 --- /dev/null +++ b/language/en.json @@ -0,0 +1,385 @@ +{ + "Preview": "Preview", + "Gallery": "Gallery", + "Generate": "Generate", + "Skip": "Skip", + "Stop": "Stop", + "Input Image": "Input Image", + "Advanced": "Advanced", + "Upscale or Variation": "Upscale or Variation", + "Image Prompt": "Image Prompt", + "Inpaint or Outpaint (beta)": "Inpaint or Outpaint (beta)", + "Drag above image to here": "Drag above image to here", + "Upscale or Variation:": "Upscale or Variation:", + "Disabled": "Disabled", + "Vary (Subtle)": "Vary (Subtle)", + "Vary (Strong)": "Vary (Strong)", + "Upscale (1.5x)": "Upscale (1.5x)", + "Upscale (2x)": "Upscale (2x)", + "Upscale (Fast 2x)": "Upscale (Fast 2x)", + "\ud83d\udcd4 Document": "\uD83D\uDCD4 Document", + "Image": "Image", + "Stop At": "Stop At", + "Weight": "Weight", + "Type": "Type", + "PyraCanny": "PyraCanny", + "CPDS": "CPDS", + "* \"Image Prompt\" is powered by Fooocus Image Mixture Engine (v1.0.1).": "* \"Image Prompt\" is powered by Fooocus Image Mixture Engine (v1.0.1).", + "The scaler multiplied to positive ADM (use 1.0 to disable).": "The scaler multiplied to positive ADM (use 1.0 to disable).", + "The scaler multiplied to negative ADM (use 1.0 to disable).": "The scaler multiplied to negative ADM (use 1.0 to disable).", + "When to end the guidance from positive/negative ADM.": "When to end the guidance from positive/negative ADM.", + "Similar to the Control Mode in A1111 (use 0.0 to disable).": "Similar to the Control Mode in A1111 (use 0.0 to disable).", + "Outpaint Expansion (": "Outpaint Expansion (", + "Outpaint": 
"Outpaint", + "Left": "Left", + "Right": "Right", + "Top": "Top", + "Bottom": "Bottom", + "* \"Inpaint or Outpaint\" is powered by the sampler \"DPMPP Fooocus Seamless 2M SDE Karras Inpaint Sampler\" (beta)": "* \"Inpaint or Outpaint\" is powered by the sampler \"DPMPP Fooocus Seamless 2M SDE Karras Inpaint Sampler\" (beta)", + "Setting": "Setting", + "Style": "Style", + "Performance": "Performance", + "Speed": "Speed", + "Quality": "Quality", + "Aspect Ratios": "Aspect Ratios", + "width \u00d7 height": "width \u00d7 height", + "Image Number": "Image Number", + "Negative Prompt": "Negative Prompt", + "Describing what you do not want to see.": "Describing what you do not want to see.", + "Random": "Random", + "Seed": "Seed", + "Disable seed increment": "Disable seed increment", + "Disable automatic seed increment when image number is > 1.": "Disable automatic seed increment when image number is > 1.", + "\ud83d\udcda History Log": "\uD83D\uDCDA History Log", + "Image Style": "Image Style", + "Fooocus V2": "Fooocus V2", + "Default (Slightly Cinematic)": "Default (Slightly Cinematic)", + "Fooocus Masterpiece": "Fooocus Masterpiece", + "Fooocus Photograph": "Fooocus Photograph", + "Fooocus Negative": "Fooocus Negative", + "SAI 3D Model": "SAI 3D Model", + "SAI Analog Film": "SAI Analog Film", + "SAI Anime": "SAI Anime", + "SAI Cinematic": "SAI Cinematic", + "SAI Comic Book": "SAI Comic Book", + "SAI Craft Clay": "SAI Craft Clay", + "SAI Digital Art": "SAI Digital Art", + "SAI Enhance": "SAI Enhance", + "SAI Fantasy Art": "SAI Fantasy Art", + "SAI Isometric": "SAI Isometric", + "SAI Line Art": "SAI Line Art", + "SAI Lowpoly": "SAI Lowpoly", + "SAI Neonpunk": "SAI Neonpunk", + "SAI Origami": "SAI Origami", + "SAI Photographic": "SAI Photographic", + "SAI Pixel Art": "SAI Pixel Art", + "SAI Texture": "SAI Texture", + "MRE Cinematic Dynamic": "MRE Cinematic Dynamic", + "MRE Spontaneous Picture": "MRE Spontaneous Picture", + "MRE Artistic Vision": "MRE Artistic Vision", + "MRE Dark Dream": "MRE Dark Dream", + "MRE Gloomy Art": "MRE Gloomy Art", + "MRE Bad Dream": "MRE Bad Dream", + "MRE Underground": "MRE Underground", + "MRE Surreal Painting": "MRE Surreal Painting", + "MRE Dynamic Illustration": "MRE Dynamic Illustration", + "MRE Undead Art": "MRE Undead Art", + "MRE Elemental Art": "MRE Elemental Art", + "MRE Space Art": "MRE Space Art", + "MRE Ancient Illustration": "MRE Ancient Illustration", + "MRE Brave Art": "MRE Brave Art", + "MRE Heroic Fantasy": "MRE Heroic Fantasy", + "MRE Dark Cyberpunk": "MRE Dark Cyberpunk", + "MRE Lyrical Geometry": "MRE Lyrical Geometry", + "MRE Sumi E Symbolic": "MRE Sumi E Symbolic", + "MRE Sumi E Detailed": "MRE Sumi E Detailed", + "MRE Manga": "MRE Manga", + "MRE Anime": "MRE Anime", + "MRE Comic": "MRE Comic", + "Ads Advertising": "Ads Advertising", + "Ads Automotive": "Ads Automotive", + "Ads Corporate": "Ads Corporate", + "Ads Fashion Editorial": "Ads Fashion Editorial", + "Ads Food Photography": "Ads Food Photography", + "Ads Gourmet Food Photography": "Ads Gourmet Food Photography", + "Ads Luxury": "Ads Luxury", + "Ads Real Estate": "Ads Real Estate", + "Ads Retail": "Ads Retail", + "Artstyle Abstract": "Artstyle Abstract", + "Artstyle Abstract Expressionism": "Artstyle Abstract Expressionism", + "Artstyle Art Deco": "Artstyle Art Deco", + "Artstyle Art Nouveau": "Artstyle Art Nouveau", + "Artstyle Constructivist": "Artstyle Constructivist", + "Artstyle Cubist": "Artstyle Cubist", + "Artstyle Expressionist": "Artstyle Expressionist", + "Artstyle Graffiti": 
"Artstyle Graffiti", + "Artstyle Hyperrealism": "Artstyle Hyperrealism", + "Artstyle Impressionist": "Artstyle Impressionist", + "Artstyle Pointillism": "Artstyle Pointillism", + "Artstyle Pop Art": "Artstyle Pop Art", + "Artstyle Psychedelic": "Artstyle Psychedelic", + "Artstyle Renaissance": "Artstyle Renaissance", + "Artstyle Steampunk": "Artstyle Steampunk", + "Artstyle Surrealist": "Artstyle Surrealist", + "Artstyle Typography": "Artstyle Typography", + "Artstyle Watercolor": "Artstyle Watercolor", + "Futuristic Biomechanical": "Futuristic Biomechanical", + "Futuristic Biomechanical Cyberpunk": "Futuristic Biomechanical Cyberpunk", + "Futuristic Cybernetic": "Futuristic Cybernetic", + "Futuristic Cybernetic Robot": "Futuristic Cybernetic Robot", + "Futuristic Cyberpunk Cityscape": "Futuristic Cyberpunk Cityscape", + "Futuristic Futuristic": "Futuristic Futuristic", + "Futuristic Retro Cyberpunk": "Futuristic Retro Cyberpunk", + "Futuristic Retro Futurism": "Futuristic Retro Futurism", + "Futuristic Sci Fi": "Futuristic Sci Fi", + "Futuristic Vaporwave": "Futuristic Vaporwave", + "Game Bubble Bobble": "Game Bubble Bobble", + "Game Cyberpunk Game": "Game Cyberpunk Game", + "Game Fighting Game": "Game Fighting Game", + "Game Gta": "Game Gta", + "Game Mario": "Game Mario", + "Game Minecraft": "Game Minecraft", + "Game Pokemon": "Game Pokemon", + "Game Retro Arcade": "Game Retro Arcade", + "Game Retro Game": "Game Retro Game", + "Game Rpg Fantasy Game": "Game Rpg Fantasy Game", + "Game Strategy Game": "Game Strategy Game", + "Game Streetfighter": "Game Streetfighter", + "Game Zelda": "Game Zelda", + "Misc Architectural": "Misc Architectural", + "Misc Disco": "Misc Disco", + "Misc Dreamscape": "Misc Dreamscape", + "Misc Dystopian": "Misc Dystopian", + "Misc Fairy Tale": "Misc Fairy Tale", + "Misc Gothic": "Misc Gothic", + "Misc Grunge": "Misc Grunge", + "Misc Horror": "Misc Horror", + "Misc Kawaii": "Misc Kawaii", + "Misc Lovecraftian": "Misc Lovecraftian", + "Misc Macabre": "Misc Macabre", + "Misc Manga": "Misc Manga", + "Misc Metropolis": "Misc Metropolis", + "Misc Minimalist": "Misc Minimalist", + "Misc Monochrome": "Misc Monochrome", + "Misc Nautical": "Misc Nautical", + "Misc Space": "Misc Space", + "Misc Stained Glass": "Misc Stained Glass", + "Misc Techwear Fashion": "Misc Techwear Fashion", + "Misc Tribal": "Misc Tribal", + "Misc Zentangle": "Misc Zentangle", + "Papercraft Collage": "Papercraft Collage", + "Papercraft Flat Papercut": "Papercraft Flat Papercut", + "Papercraft Kirigami": "Papercraft Kirigami", + "Papercraft Paper Mache": "Papercraft Paper Mache", + "Papercraft Paper Quilling": "Papercraft Paper Quilling", + "Papercraft Papercut Collage": "Papercraft Papercut Collage", + "Papercraft Papercut Shadow Box": "Papercraft Papercut Shadow Box", + "Papercraft Stacked Papercut": "Papercraft Stacked Papercut", + "Papercraft Thick Layered Papercut": "Papercraft Thick Layered Papercut", + "Photo Alien": "Photo Alien", + "Photo Film Noir": "Photo Film Noir", + "Photo Glamour": "Photo Glamour", + "Photo Hdr": "Photo Hdr", + "Photo Iphone Photographic": "Photo Iphone Photographic", + "Photo Long Exposure": "Photo Long Exposure", + "Photo Neon Noir": "Photo Neon Noir", + "Photo Silhouette": "Photo Silhouette", + "Photo Tilt Shift": "Photo Tilt Shift", + "Cinematic Diva": "Cinematic Diva", + "Abstract Expressionism": "Abstract Expressionism", + "Academia": "Academia", + "Action Figure": "Action Figure", + "Adorable 3D Character": "Adorable 3D Character", + "Adorable Kawaii": "Adorable 
Kawaii", + "Art Deco": "Art Deco", + "Art Nouveau": "Art Nouveau", + "Astral Aura": "Astral Aura", + "Avant Garde": "Avant Garde", + "Baroque": "Baroque", + "Bauhaus Style Poster": "Bauhaus Style Poster", + "Blueprint Schematic Drawing": "Blueprint Schematic Drawing", + "Caricature": "Caricature", + "Cel Shaded Art": "Cel Shaded Art", + "Character Design Sheet": "Character Design Sheet", + "Classicism Art": "Classicism Art", + "Color Field Painting": "Color Field Painting", + "Colored Pencil Art": "Colored Pencil Art", + "Conceptual Art": "Conceptual Art", + "Constructivism": "Constructivism", + "Cubism": "Cubism", + "Dadaism": "Dadaism", + "Dark Fantasy": "Dark Fantasy", + "Dark Moody Atmosphere": "Dark Moody Atmosphere", + "Dmt Art Style": "Dmt Art Style", + "Doodle Art": "Doodle Art", + "Double Exposure": "Double Exposure", + "Dripping Paint Splatter Art": "Dripping Paint Splatter Art", + "Expressionism": "Expressionism", + "Faded Polaroid Photo": "Faded Polaroid Photo", + "Fauvism": "Fauvism", + "Flat 2d Art": "Flat 2d Art", + "Fortnite Art Style": "Fortnite Art Style", + "Futurism": "Futurism", + "Glitchcore": "Glitchcore", + "Glo Fi": "Glo Fi", + "Googie Art Style": "Googie Art Style", + "Graffiti Art": "Graffiti Art", + "Harlem Renaissance Art": "Harlem Renaissance Art", + "High Fashion": "High Fashion", + "Idyllic": "Idyllic", + "Impressionism": "Impressionism", + "Infographic Drawing": "Infographic Drawing", + "Ink Dripping Drawing": "Ink Dripping Drawing", + "Japanese Ink Drawing": "Japanese Ink Drawing", + "Knolling Photography": "Knolling Photography", + "Light Cheery Atmosphere": "Light Cheery Atmosphere", + "Logo Design": "Logo Design", + "Luxurious Elegance": "Luxurious Elegance", + "Macro Photography": "Macro Photography", + "Mandola Art": "Mandola Art", + "Marker Drawing": "Marker Drawing", + "Medievalism": "Medievalism", + "Minimalism": "Minimalism", + "Neo Baroque": "Neo Baroque", + "Neo Byzantine": "Neo Byzantine", + "Neo Futurism": "Neo Futurism", + "Neo Impressionism": "Neo Impressionism", + "Neo Rococo": "Neo Rococo", + "Neoclassicism": "Neoclassicism", + "Op Art": "Op Art", + "Ornate And Intricate": "Ornate And Intricate", + "Pencil Sketch Drawing": "Pencil Sketch Drawing", + "Pop Art 2": "Pop Art 2", + "Rococo": "Rococo", + "Silhouette Art": "Silhouette Art", + "Simple Vector Art": "Simple Vector Art", + "Sketchup": "Sketchup", + "Steampunk 2": "Steampunk 2", + "Surrealism": "Surrealism", + "Suprematism": "Suprematism", + "Terragen": "Terragen", + "Tranquil Relaxing Atmosphere": "Tranquil Relaxing Atmosphere", + "Sticker Designs": "Sticker Designs", + "Vibrant Rim Light": "Vibrant Rim Light", + "Volumetric Lighting": "Volumetric Lighting", + "Watercolor 2": "Watercolor 2", + "Whimsical And Playful": "Whimsical And Playful", + "Model": "Model", + "Base Model (SDXL only)": "Base Model (SDXL only)", + "sd_xl_base_1.0_0.9vae.safetensors": "sd_xl_base_1.0_0.9vae.safetensors", + "bluePencilXL_v009.safetensors": "bluePencilXL_v009.safetensors", + "bluePencilXL_v050.safetensors": "bluePencilXL_v050.safetensors", + "DreamShaper_8_pruned.safetensors": "DreamShaper_8_pruned.safetensors", + "realisticStockPhoto_v10.safetensors": "realisticStockPhoto_v10.safetensors", + "realisticVisionV51_v51VAE.safetensors": "realisticVisionV51_v51VAE.safetensors", + "sd_xl_refiner_1.0_0.9vae.safetensors": "sd_xl_refiner_1.0_0.9vae.safetensors", + "Refiner (SDXL or SD 1.5)": "Refiner (SDXL or SD 1.5)", + "None": "None", + "LoRAs": "LoRAs", + "SDXL LoRA 1": "SDXL LoRA 1", + 
"sd_xl_offset_example-lora_1.0.safetensors": "sd_xl_offset_example-lora_1.0.safetensors", + "3d_render_style_xl.safetensors": "3d_render_style_xl.safetensors", + "Bloodstained-XL-V1.safetensors": "Bloodstained-XL-V1.safetensors", + "SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.safetensors": "SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.safetensors", + "SDXL LoRA 2": "SDXL LoRA 2", + "SDXL LoRA 3": "SDXL LoRA 3", + "SDXL LoRA 4": "SDXL LoRA 4", + "SDXL LoRA 5": "SDXL LoRA 5", + "Refresh": "Refresh", + "\ud83d\udd04 Refresh All Files": "\ud83d\udd04 Refresh All Files", + "Sampling Sharpness": "Sampling Sharpness", + "Higher value means image and texture are sharper.": "Higher value means image and texture are sharper.", + "Guidance Scale": "Guidance Scale", + "Higher value means style is cleaner, vivider, and more artistic.": "Higher value means style is cleaner, vivider, and more artistic.", + "Developer Debug Mode": "Developer Debug Mode", + "Developer Debug Tools": "Developer Debug Tools", + "Positive ADM Guidance Scaler": "Positive ADM Guidance Scaler", + "The scaler multiplied to positive ADM (use 1.0 to disable). ": "The scaler multiplied to positive ADM (use 1.0 to disable). ", + "Negative ADM Guidance Scaler": "Negative ADM Guidance Scaler", + "The scaler multiplied to negative ADM (use 1.0 to disable). ": "The scaler multiplied to negative ADM (use 1.0 to disable). ", + "ADM Guidance End At Step": "ADM Guidance End At Step", + "When to end the guidance from positive/negative ADM. ": "When to end the guidance from positive/negative ADM. ", + "Refiner swap method": "Refiner swap method", + "joint": "joint", + "separate": "separate", + "vae": "vae", + "CFG Mimicking from TSNR": "CFG Mimicking from TSNR", + "Enabling Fooocus's implementation of CFG mimicking for TSNR (effective when real CFG > mimicked CFG).": "Enabling Fooocus's implementation of CFG mimicking for TSNR (effective when real CFG > mimicked CFG).", + "Sampler": "Sampler", + "dpmpp_2m_sde_gpu": "dpmpp_2m_sde_gpu", + "Only effective in non-inpaint mode.": "Only effective in non-inpaint mode.", + "euler": "euler", + "euler_ancestral": "euler_ancestral", + "heun": "heun", + "dpm_2": "dpm_2", + "dpm_2_ancestral": "dpm_2_ancestral", + "lms": "lms", + "dpm_fast": "dpm_fast", + "dpm_adaptive": "dpm_adaptive", + "dpmpp_2s_ancestral": "dpmpp_2s_ancestral", + "dpmpp_sde": "dpmpp_sde", + "dpmpp_sde_gpu": "dpmpp_sde_gpu", + "dpmpp_2m": "dpmpp_2m", + "dpmpp_2m_sde": "dpmpp_2m_sde", + "dpmpp_3m_sde": "dpmpp_3m_sde", + "dpmpp_3m_sde_gpu": "dpmpp_3m_sde_gpu", + "ddpm": "ddpm", + "ddim": "ddim", + "uni_pc": "uni_pc", + "uni_pc_bh2": "uni_pc_bh2", + "Scheduler": "Scheduler", + "karras": "karras", + "Scheduler of Sampler.": "Scheduler of Sampler.", + "normal": "normal", + "exponential": "exponential", + "sgm_uniform": "sgm_uniform", + "simple": "simple", + "ddim_uniform": "ddim_uniform", + "Forced Overwrite of Sampling Step": "Forced Overwrite of Sampling Step", + "Set as -1 to disable. For developer debugging.": "Set as -1 to disable. For developer debugging.", + "Forced Overwrite of Refiner Switch Step": "Forced Overwrite of Refiner Switch Step", + "Forced Overwrite of Generating Width": "Forced Overwrite of Generating Width", + "Set as -1 to disable. For developer debugging. Results will be worse for non-standard numbers that SDXL is not trained on.": "Set as -1 to disable. For developer debugging. 
Results will be worse for non-standard numbers that SDXL is not trained on.", + "Forced Overwrite of Generating Height": "Forced Overwrite of Generating Height", + "Forced Overwrite of Denoising Strength of \"Vary\"": "Forced Overwrite of Denoising Strength of \"Vary\"", + "Set as negative number to disable. For developer debugging.": "Set as negative number to disable. For developer debugging.", + "Forced Overwrite of Denoising Strength of \"Upscale\"": "Forced Overwrite of Denoising Strength of \"Upscale\"", + "Disable Preview": "Disable Preview", + "Disable preview during generation.": "Disable preview during generation.", + "Disable Intermediate Results": "Disable Intermediate Results", + "Disable intermediate results during generation, only show final gallery.": "Disable intermediate results during generation, only show final gallery.", + "Inpaint Engine": "Inpaint Engine", + "v1": "v1", + "Version of Fooocus inpaint model": "Version of Fooocus inpaint model", + "v2.5": "v2.5", + "Control Debug": "Control Debug", + "Debug Preprocessors": "Debug Preprocessors", + "Mixing Image Prompt and Vary/Upscale": "Mixing Image Prompt and Vary/Upscale", + "Mixing Image Prompt and Inpaint": "Mixing Image Prompt and Inpaint", + "Softness of ControlNet": "Softness of ControlNet", + "Similar to the Control Mode in A1111 (use 0.0 to disable). ": "Similar to the Control Mode in A1111 (use 0.0 to disable). ", + "Canny": "Canny", + "Canny Low Threshold": "Canny Low Threshold", + "Canny High Threshold": "Canny High Threshold", + "FreeU": "FreeU", + "Enabled": "Enabled", + "B1": "B1", + "B2": "B2", + "S1": "S1", + "S2": "S2", + "Extreme Speed": "Extreme Speed", + "\uD83D\uDD0E Type here to search styles ...": "\uD83D\uDD0E Type here to search styles ...", + "Type prompt here.": "Type prompt here.", + "Outpaint Expansion Direction:": "Outpaint Expansion Direction:", + "* Powered by Fooocus Inpaint Engine (beta)": "* Powered by Fooocus Inpaint Engine (beta)", + "Fooocus Enhance": "Fooocus Enhance", + "Fooocus Cinematic": "Fooocus Cinematic", + "Fooocus Sharp": "Fooocus Sharp", + "Drag any image generated by Fooocus here": "Drag any image generated by Fooocus here", + "Metadata": "Metadata", + "Apply Metadata": "Apply Metadata", + "Metadata Scheme": "Metadata Scheme", + "Image Prompt parameters are not included. Use png and a1111 for compatibility with Civitai.": "Image Prompt parameters are not included. 
Use png and a1111 for compatibility with Civitai.", + "fooocus (json)": "fooocus (json)", + "a1111 (plain text)": "a1111 (plain text)" +} \ No newline at end of file diff --git a/language/example.json b/language/example.json new file mode 100644 index 0000000000000000000000000000000000000000..9b792449029bba64f2e190a03ac8459f6ccb84a9 --- /dev/null +++ b/language/example.json @@ -0,0 +1,6 @@ +{ + "Generate": "生成", + "Input Image": "入力画像", + "Advanced": "고급", + "SAI 3D Model": "SAI 3D Modèle" +} diff --git a/launch.py b/launch.py new file mode 100644 index 0000000000000000000000000000000000000000..f545c39e6d3a9222340affbccf727b763cb2043f --- /dev/null +++ b/launch.py @@ -0,0 +1,127 @@ +import os +import sys +import ssl + +print('[System ARGV] ' + str(sys.argv)) + +root = os.path.dirname(os.path.abspath(__file__)) +sys.path.append(root) +os.chdir(root) + +os.environ["PYTORCH_ENABLE_MPS_FALLBACK"] = "1" +os.environ["PYTORCH_MPS_HIGH_WATERMARK_RATIO"] = "0.0" +if "GRADIO_SERVER_PORT" not in os.environ: + os.environ["GRADIO_SERVER_PORT"] = "7865" + +ssl._create_default_https_context = ssl._create_unverified_context + + +import platform +import fooocus_version + +from build_launcher import build_launcher +from modules.launch_util import is_installed, run, python, run_pip, requirements_met +from modules.model_loader import load_file_from_url + + +REINSTALL_ALL = False +TRY_INSTALL_XFORMERS = False + + +def prepare_environment(): + torch_index_url = os.environ.get('TORCH_INDEX_URL', "https://download.pytorch.org/whl/cu121") + torch_command = os.environ.get('TORCH_COMMAND', + f"pip install torch==2.1.0 torchvision==0.16.0 --extra-index-url {torch_index_url}") + requirements_file = os.environ.get('REQS_FILE', "requirements_versions.txt") + + print(f"Python {sys.version}") + print(f"Fooocus version: {fooocus_version.version}") + + if REINSTALL_ALL or not is_installed("torch") or not is_installed("torchvision"): + run(f'"{python}" -m {torch_command}', "Installing torch and torchvision", "Couldn't install torch", live=True) + + if TRY_INSTALL_XFORMERS: + if REINSTALL_ALL or not is_installed("xformers"): + xformers_package = os.environ.get('XFORMERS_PACKAGE', 'xformers==0.0.23') + if platform.system() == "Windows": + if platform.python_version().startswith("3.10"): + run_pip(f"install -U -I --no-deps {xformers_package}", "xformers", live=True) + else: + print("Installation of xformers is not supported in this version of Python.") + print( + "You can also check this and build manually: https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Xformers#building-xformers-on-windows-by-duckness") + if not is_installed("xformers"): + exit(0) + elif platform.system() == "Linux": + run_pip(f"install -U -I --no-deps {xformers_package}", "xformers") + + if REINSTALL_ALL or not requirements_met(requirements_file): + run_pip(f"install -r \"{requirements_file}\"", "requirements") + + return + + +vae_approx_filenames = [ + ('xlvaeapp.pth', 'https://huggingface.co/lllyasviel/misc/resolve/main/xlvaeapp.pth'), + ('vaeapp_sd15.pth', 'https://huggingface.co/lllyasviel/misc/resolve/main/vaeapp_sd15.pt'), + ('xl-to-v1_interposer-v3.1.safetensors', + 'https://huggingface.co/lllyasviel/misc/resolve/main/xl-to-v1_interposer-v3.1.safetensors') +] + +def ini_args(): + from args_manager import args + return args + + +prepare_environment() +build_launcher() +args = ini_args() + + +if args.gpu_device_id is not None: + os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu_device_id) + print("Set device to:", args.gpu_device_id) + + 
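One easy-to-miss detail in the block above: `CUDA_VISIBLE_DEVICES` only takes effect if it is set before CUDA is initialized, which is why launch.py assigns it before `modules.config` (and, through it, torch) gets imported. A minimal sketch of the behavior as a hypothetical standalone script, not part of this diff:

```python
# Hypothetical standalone demo (not part of launch.py): restrict torch
# to a single GPU by setting the env var before CUDA initializes.
import os

os.environ['CUDA_VISIBLE_DEVICES'] = '1'  # expose only physical GPU 1

import torch  # imported after the env var so the restriction applies

if torch.cuda.is_available():
    print(torch.cuda.device_count())       # 1: the visible GPU is re-indexed as cuda:0
    print(torch.cuda.get_device_name(0))   # name of physical GPU 1
```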
+from modules import config
+
+def download_models():
+    for file_name, url in vae_approx_filenames:
+        load_file_from_url(url=url, model_dir=config.path_vae_approx, file_name=file_name)
+
+    load_file_from_url(
+        url='https://huggingface.co/lllyasviel/misc/resolve/main/fooocus_expansion.bin',
+        model_dir=config.path_fooocus_expansion,
+        file_name='pytorch_model.bin'
+    )
+
+    if args.disable_preset_download:
+        print('Skipped model download.')
+        return
+
+    if not args.always_download_new_model:
+        if not os.path.exists(os.path.join(config.paths_checkpoints[0], config.default_base_model_name)):
+            for alternative_model_name in config.previous_default_models:
+                if os.path.exists(os.path.join(config.paths_checkpoints[0], alternative_model_name)):
+                    print(f'You do not have [{config.default_base_model_name}] but you have [{alternative_model_name}].')
+                    print(f'Fooocus will use [{alternative_model_name}] to avoid downloading new models, '
+                          f'but you are not using latest models.')
+                    print('Use --always-download-new-model to avoid fallback and always get new models.')
+                    config.checkpoint_downloads = {}
+                    config.default_base_model_name = alternative_model_name
+                    break
+
+    for file_name, url in config.checkpoint_downloads.items():
+        load_file_from_url(url=url, model_dir=config.paths_checkpoints[0], file_name=file_name)
+    for file_name, url in config.embeddings_downloads.items():
+        load_file_from_url(url=url, model_dir=config.path_embeddings, file_name=file_name)
+    for file_name, url in config.lora_downloads.items():
+        load_file_from_url(url=url, model_dir=config.paths_loras[0], file_name=file_name)
+
+    return
+
+
+download_models()
+
+
+from webui import *
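The fallback logic in `download_models` is worth spelling out: if the configured default checkpoint is missing but an older default is already on disk, the download is skipped and the old file is reused. A hedged, self-contained sketch of that rule (the directory and model names below are placeholders; the real values come from `modules.config`):

```python
# Minimal, self-contained sketch of the fallback rule above; directory
# and file names are illustrative, not the project's real defaults.
import os

def pick_base_model(checkpoint_dir, default_name, previous_names):
    """Return (model_to_use, needs_download)."""
    if os.path.exists(os.path.join(checkpoint_dir, default_name)):
        return default_name, False      # default already present
    for old_name in previous_names:
        if os.path.exists(os.path.join(checkpoint_dir, old_name)):
            return old_name, False      # reuse an older default, skip the download
    return default_name, True           # nothing local: download the default

# Hypothetical usage:
# model, need_dl = pick_base_model("models/checkpoints",
#                                  "new_default.safetensors",
#                                  ["sd_xl_base_1.0_0.9vae.safetensors"])
```

diff --git a/ldm_patched/contrib/external.py 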
b/ldm_patched/contrib/external.py new file mode 100644 index 0000000000000000000000000000000000000000..927cd3f38a1750168b740f7d32686e20abcb5304 --- /dev/null +++ b/ldm_patched/contrib/external.py @@ -0,0 +1,1954 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import torch + +import os +import sys +import json +import hashlib +import traceback +import math +import time +import random + +from PIL import Image, ImageOps, ImageSequence +from PIL.PngImagePlugin import PngInfo +import numpy as np +import safetensors.torch + +pass # sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), "ldm_patched")) + + +import ldm_patched.modules.diffusers_load +import ldm_patched.modules.samplers +import ldm_patched.modules.sample +import ldm_patched.modules.sd +import ldm_patched.modules.utils +import ldm_patched.modules.controlnet + +import ldm_patched.modules.clip_vision + +import ldm_patched.modules.model_management +from ldm_patched.modules.args_parser import args + +import importlib + +import ldm_patched.utils.path_utils +import ldm_patched.utils.latent_visualization + +def before_node_execution(): + ldm_patched.modules.model_management.throw_exception_if_processing_interrupted() + +def interrupt_processing(value=True): + ldm_patched.modules.model_management.interrupt_current_processing(value) + +MAX_RESOLUTION=8192 + +class CLIPTextEncode: + @classmethod + def INPUT_TYPES(s): + return {"required": {"text": ("STRING", {"multiline": True}), "clip": ("CLIP", )}} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "encode" + + CATEGORY = "conditioning" + + def encode(self, clip, text): + tokens = clip.tokenize(text) + cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True) + return ([[cond, {"pooled_output": pooled}]], ) + +class ConditioningCombine: + @classmethod + def INPUT_TYPES(s): + return {"required": {"conditioning_1": ("CONDITIONING", ), "conditioning_2": ("CONDITIONING", )}} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "combine" + + CATEGORY = "conditioning" + + def combine(self, conditioning_1, conditioning_2): + return (conditioning_1 + conditioning_2, ) + +class ConditioningAverage : + @classmethod + def INPUT_TYPES(s): + return {"required": {"conditioning_to": ("CONDITIONING", ), "conditioning_from": ("CONDITIONING", ), + "conditioning_to_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}) + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "addWeighted" + + CATEGORY = "conditioning" + + def addWeighted(self, conditioning_to, conditioning_from, conditioning_to_strength): + out = [] + + if len(conditioning_from) > 1: + print("Warning: ConditioningAverage conditioning_from contains more than 1 cond, only the first one will actually be applied to conditioning_to.") + + cond_from = conditioning_from[0][0] + pooled_output_from = conditioning_from[0][1].get("pooled_output", None) + + for i in range(len(conditioning_to)): + t1 = conditioning_to[i][0] + pooled_output_to = conditioning_to[i][1].get("pooled_output", pooled_output_from) + t0 = cond_from[:,:t1.shape[1]] + if t0.shape[1] < t1.shape[1]: + t0 = torch.cat([t0] + [torch.zeros((1, (t1.shape[1] - t0.shape[1]), t1.shape[2]))], dim=1) + + tw = torch.mul(t1, conditioning_to_strength) + torch.mul(t0, (1.0 - conditioning_to_strength)) + t_to = conditioning_to[i][1].copy() + if pooled_output_from is not None and pooled_output_to is not None: + t_to["pooled_output"] = torch.mul(pooled_output_to, conditioning_to_strength) + torch.mul(pooled_output_from, (1.0 - 
conditioning_to_strength)) + elif pooled_output_from is not None: + t_to["pooled_output"] = pooled_output_from + + n = [tw, t_to] + out.append(n) + return (out, ) + +class ConditioningConcat: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "conditioning_to": ("CONDITIONING",), + "conditioning_from": ("CONDITIONING",), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "concat" + + CATEGORY = "conditioning" + + def concat(self, conditioning_to, conditioning_from): + out = [] + + if len(conditioning_from) > 1: + print("Warning: ConditioningConcat conditioning_from contains more than 1 cond, only the first one will actually be applied to conditioning_to.") + + cond_from = conditioning_from[0][0] + + for i in range(len(conditioning_to)): + t1 = conditioning_to[i][0] + tw = torch.cat((t1, cond_from),1) + n = [tw, conditioning_to[i][1].copy()] + out.append(n) + + return (out, ) + +class ConditioningSetArea: + @classmethod + def INPUT_TYPES(s): + return {"required": {"conditioning": ("CONDITIONING", ), + "width": ("INT", {"default": 64, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 64, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "append" + + CATEGORY = "conditioning" + + def append(self, conditioning, width, height, x, y, strength): + c = [] + for t in conditioning: + n = [t[0], t[1].copy()] + n[1]['area'] = (height // 8, width // 8, y // 8, x // 8) + n[1]['strength'] = strength + n[1]['set_area_to_bounds'] = False + c.append(n) + return (c, ) + +class ConditioningSetAreaPercentage: + @classmethod + def INPUT_TYPES(s): + return {"required": {"conditioning": ("CONDITIONING", ), + "width": ("FLOAT", {"default": 1.0, "min": 0, "max": 1.0, "step": 0.01}), + "height": ("FLOAT", {"default": 1.0, "min": 0, "max": 1.0, "step": 0.01}), + "x": ("FLOAT", {"default": 0, "min": 0, "max": 1.0, "step": 0.01}), + "y": ("FLOAT", {"default": 0, "min": 0, "max": 1.0, "step": 0.01}), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "append" + + CATEGORY = "conditioning" + + def append(self, conditioning, width, height, x, y, strength): + c = [] + for t in conditioning: + n = [t[0], t[1].copy()] + n[1]['area'] = ("percentage", height, width, y, x) + n[1]['strength'] = strength + n[1]['set_area_to_bounds'] = False + c.append(n) + return (c, ) + +class ConditioningSetMask: + @classmethod + def INPUT_TYPES(s): + return {"required": {"conditioning": ("CONDITIONING", ), + "mask": ("MASK", ), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "set_cond_area": (["default", "mask bounds"],), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "append" + + CATEGORY = "conditioning" + + def append(self, conditioning, mask, set_cond_area, strength): + c = [] + set_area_to_bounds = False + if set_cond_area != "default": + set_area_to_bounds = True + if len(mask.shape) < 3: + mask = mask.unsqueeze(0) + for t in conditioning: + n = [t[0], t[1].copy()] + _, h, w = mask.shape + n[1]['mask'] = mask + n[1]['set_area_to_bounds'] = set_area_to_bounds + n[1]['mask_strength'] = strength + c.append(n) + return (c, ) + +class ConditioningZeroOut: + @classmethod + def INPUT_TYPES(s): + 
return {"required": {"conditioning": ("CONDITIONING", )}} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "zero_out" + + CATEGORY = "advanced/conditioning" + + def zero_out(self, conditioning): + c = [] + for t in conditioning: + d = t[1].copy() + if "pooled_output" in d: + d["pooled_output"] = torch.zeros_like(d["pooled_output"]) + n = [torch.zeros_like(t[0]), d] + c.append(n) + return (c, ) + +class ConditioningSetTimestepRange: + @classmethod + def INPUT_TYPES(s): + return {"required": {"conditioning": ("CONDITIONING", ), + "start": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}) + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "set_range" + + CATEGORY = "advanced/conditioning" + + def set_range(self, conditioning, start, end): + c = [] + for t in conditioning: + d = t[1].copy() + d['start_percent'] = start + d['end_percent'] = end + n = [t[0], d] + c.append(n) + return (c, ) + +class VAEDecode: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT", ), "vae": ("VAE", )}} + RETURN_TYPES = ("IMAGE",) + FUNCTION = "decode" + + CATEGORY = "latent" + + def decode(self, vae, samples): + return (vae.decode(samples["samples"]), ) + +class VAEDecodeTiled: + @classmethod + def INPUT_TYPES(s): + return {"required": {"samples": ("LATENT", ), "vae": ("VAE", ), + "tile_size": ("INT", {"default": 512, "min": 320, "max": 4096, "step": 64}) + }} + RETURN_TYPES = ("IMAGE",) + FUNCTION = "decode" + + CATEGORY = "_for_testing" + + def decode(self, vae, samples, tile_size): + return (vae.decode_tiled(samples["samples"], tile_x=tile_size // 8, tile_y=tile_size // 8, ), ) + +class VAEEncode: + @classmethod + def INPUT_TYPES(s): + return {"required": { "pixels": ("IMAGE", ), "vae": ("VAE", )}} + RETURN_TYPES = ("LATENT",) + FUNCTION = "encode" + + CATEGORY = "latent" + + @staticmethod + def vae_encode_crop_pixels(pixels): + x = (pixels.shape[1] // 8) * 8 + y = (pixels.shape[2] // 8) * 8 + if pixels.shape[1] != x or pixels.shape[2] != y: + x_offset = (pixels.shape[1] % 8) // 2 + y_offset = (pixels.shape[2] % 8) // 2 + pixels = pixels[:, x_offset:x + x_offset, y_offset:y + y_offset, :] + return pixels + + def encode(self, vae, pixels): + pixels = self.vae_encode_crop_pixels(pixels) + t = vae.encode(pixels[:,:,:,:3]) + return ({"samples":t}, ) + +class VAEEncodeTiled: + @classmethod + def INPUT_TYPES(s): + return {"required": {"pixels": ("IMAGE", ), "vae": ("VAE", ), + "tile_size": ("INT", {"default": 512, "min": 320, "max": 4096, "step": 64}) + }} + RETURN_TYPES = ("LATENT",) + FUNCTION = "encode" + + CATEGORY = "_for_testing" + + def encode(self, vae, pixels, tile_size): + pixels = VAEEncode.vae_encode_crop_pixels(pixels) + t = vae.encode_tiled(pixels[:,:,:,:3], tile_x=tile_size, tile_y=tile_size, ) + return ({"samples":t}, ) + +class VAEEncodeForInpaint: + @classmethod + def INPUT_TYPES(s): + return {"required": { "pixels": ("IMAGE", ), "vae": ("VAE", ), "mask": ("MASK", ), "grow_mask_by": ("INT", {"default": 6, "min": 0, "max": 64, "step": 1}),}} + RETURN_TYPES = ("LATENT",) + FUNCTION = "encode" + + CATEGORY = "latent/inpaint" + + def encode(self, vae, pixels, mask, grow_mask_by=6): + x = (pixels.shape[1] // 8) * 8 + y = (pixels.shape[2] // 8) * 8 + mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(pixels.shape[1], pixels.shape[2]), mode="bilinear") + + pixels = pixels.clone() + if pixels.shape[1] != x or pixels.shape[2] != y: + 
x_offset = (pixels.shape[1] % 8) // 2 + y_offset = (pixels.shape[2] % 8) // 2 + pixels = pixels[:,x_offset:x + x_offset, y_offset:y + y_offset,:] + mask = mask[:,:,x_offset:x + x_offset, y_offset:y + y_offset] + + #grow mask by a few pixels to keep things seamless in latent space + if grow_mask_by == 0: + mask_erosion = mask + else: + kernel_tensor = torch.ones((1, 1, grow_mask_by, grow_mask_by)) + padding = math.ceil((grow_mask_by - 1) / 2) + + mask_erosion = torch.clamp(torch.nn.functional.conv2d(mask.round(), kernel_tensor, padding=padding), 0, 1) + + m = (1.0 - mask.round()).squeeze(1) + for i in range(3): + pixels[:,:,:,i] -= 0.5 + pixels[:,:,:,i] *= m + pixels[:,:,:,i] += 0.5 + t = vae.encode(pixels) + + return ({"samples":t, "noise_mask": (mask_erosion[:,:,:x,:y].round())}, ) + + +class InpaintModelConditioning: + @classmethod + def INPUT_TYPES(s): + return {"required": {"positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "vae": ("VAE", ), + "pixels": ("IMAGE", ), + "mask": ("MASK", ), + }} + + RETURN_TYPES = ("CONDITIONING","CONDITIONING","LATENT") + RETURN_NAMES = ("positive", "negative", "latent") + FUNCTION = "encode" + + CATEGORY = "conditioning/inpaint" + + def encode(self, positive, negative, pixels, vae, mask): + x = (pixels.shape[1] // 8) * 8 + y = (pixels.shape[2] // 8) * 8 + mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(pixels.shape[1], pixels.shape[2]), mode="bilinear") + + orig_pixels = pixels + pixels = orig_pixels.clone() + if pixels.shape[1] != x or pixels.shape[2] != y: + x_offset = (pixels.shape[1] % 8) // 2 + y_offset = (pixels.shape[2] % 8) // 2 + pixels = pixels[:,x_offset:x + x_offset, y_offset:y + y_offset,:] + mask = mask[:,:,x_offset:x + x_offset, y_offset:y + y_offset] + + m = (1.0 - mask.round()).squeeze(1) + for i in range(3): + pixels[:,:,:,i] -= 0.5 + pixels[:,:,:,i] *= m + pixels[:,:,:,i] += 0.5 + concat_latent = vae.encode(pixels) + orig_latent = vae.encode(orig_pixels) + + out_latent = {} + + out_latent["samples"] = orig_latent + out_latent["noise_mask"] = mask + + out = [] + for conditioning in [positive, negative]: + c = [] + for t in conditioning: + d = t[1].copy() + d["concat_latent_image"] = concat_latent + d["concat_mask"] = mask + n = [t[0], d] + c.append(n) + out.append(c) + return (out[0], out[1], out_latent) + + +class SaveLatent: + def __init__(self): + self.output_dir = ldm_patched.utils.path_utils.get_output_directory() + + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT", ), + "filename_prefix": ("STRING", {"default": "latents/ldm_patched"})}, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + RETURN_TYPES = () + FUNCTION = "save" + + OUTPUT_NODE = True + + CATEGORY = "_for_testing" + + def save(self, samples, filename_prefix="ldm_patched", prompt=None, extra_pnginfo=None): + full_output_folder, filename, counter, subfolder, filename_prefix = ldm_patched.utils.path_utils.get_save_image_path(filename_prefix, self.output_dir) + + # support save metadata for latent sharing + prompt_info = "" + if prompt is not None: + prompt_info = json.dumps(prompt) + + metadata = None + if not args.disable_server_info: + metadata = {"prompt": prompt_info} + if extra_pnginfo is not None: + for x in extra_pnginfo: + metadata[x] = json.dumps(extra_pnginfo[x]) + + file = f"{filename}_{counter:05}_.latent" + + results = list() + results.append({ + "filename": file, + "subfolder": subfolder, + "type": "output" + }) + + file = 
os.path.join(full_output_folder, file) + + output = {} + output["latent_tensor"] = samples["samples"] + output["latent_format_version_0"] = torch.tensor([]) + + ldm_patched.modules.utils.save_torch_file(output, file, metadata=metadata) + return { "ui": { "latents": results } } + + +class LoadLatent: + @classmethod + def INPUT_TYPES(s): + input_dir = ldm_patched.utils.path_utils.get_input_directory() + files = [f for f in os.listdir(input_dir) if os.path.isfile(os.path.join(input_dir, f)) and f.endswith(".latent")] + return {"required": {"latent": [sorted(files), ]}, } + + CATEGORY = "_for_testing" + + RETURN_TYPES = ("LATENT", ) + FUNCTION = "load" + + def load(self, latent): + latent_path = ldm_patched.utils.path_utils.get_annotated_filepath(latent) + latent = safetensors.torch.load_file(latent_path, device="cpu") + multiplier = 1.0 + if "latent_format_version_0" not in latent: + multiplier = 1.0 / 0.18215 + samples = {"samples": latent["latent_tensor"].float() * multiplier} + return (samples, ) + + @classmethod + def IS_CHANGED(s, latent): + image_path = ldm_patched.utils.path_utils.get_annotated_filepath(latent) + m = hashlib.sha256() + with open(image_path, 'rb') as f: + m.update(f.read()) + return m.digest().hex() + + @classmethod + def VALIDATE_INPUTS(s, latent): + if not ldm_patched.utils.path_utils.exists_annotated_filepath(latent): + return "Invalid latent file: {}".format(latent) + return True + + +class CheckpointLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "config_name": (ldm_patched.utils.path_utils.get_filename_list("configs"), ), + "ckpt_name": (ldm_patched.utils.path_utils.get_filename_list("checkpoints"), )}} + RETURN_TYPES = ("MODEL", "CLIP", "VAE") + FUNCTION = "load_checkpoint" + + CATEGORY = "advanced/loaders" + + def load_checkpoint(self, config_name, ckpt_name, output_vae=True, output_clip=True): + config_path = ldm_patched.utils.path_utils.get_full_path("configs", config_name) + ckpt_path = ldm_patched.utils.path_utils.get_full_path("checkpoints", ckpt_name) + return ldm_patched.modules.sd.load_checkpoint(config_path, ckpt_path, output_vae=True, output_clip=True, embedding_directory=ldm_patched.utils.path_utils.get_folder_paths("embeddings")) + +class CheckpointLoaderSimple: + @classmethod + def INPUT_TYPES(s): + return {"required": { "ckpt_name": (ldm_patched.utils.path_utils.get_filename_list("checkpoints"), ), + }} + RETURN_TYPES = ("MODEL", "CLIP", "VAE") + FUNCTION = "load_checkpoint" + + CATEGORY = "loaders" + + def load_checkpoint(self, ckpt_name, output_vae=True, output_clip=True): + ckpt_path = ldm_patched.utils.path_utils.get_full_path("checkpoints", ckpt_name) + out = ldm_patched.modules.sd.load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=True, embedding_directory=ldm_patched.utils.path_utils.get_folder_paths("embeddings")) + return out[:3] + +class DiffusersLoader: + @classmethod + def INPUT_TYPES(cls): + paths = [] + for search_path in ldm_patched.utils.path_utils.get_folder_paths("diffusers"): + if os.path.exists(search_path): + for root, subdir, files in os.walk(search_path, followlinks=True): + if "model_index.json" in files: + paths.append(os.path.relpath(root, start=search_path)) + + return {"required": {"model_path": (paths,), }} + RETURN_TYPES = ("MODEL", "CLIP", "VAE") + FUNCTION = "load_checkpoint" + + CATEGORY = "advanced/loaders/deprecated" + + def load_checkpoint(self, model_path, output_vae=True, output_clip=True): + for search_path in ldm_patched.utils.path_utils.get_folder_paths("diffusers"): + 
if os.path.exists(search_path): + path = os.path.join(search_path, model_path) + if os.path.exists(path): + model_path = path + break + + return ldm_patched.modules.diffusers_load.load_diffusers(model_path, output_vae=output_vae, output_clip=output_clip, embedding_directory=ldm_patched.utils.path_utils.get_folder_paths("embeddings")) + + +class unCLIPCheckpointLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "ckpt_name": (ldm_patched.utils.path_utils.get_filename_list("checkpoints"), ), + }} + RETURN_TYPES = ("MODEL", "CLIP", "VAE", "CLIP_VISION") + FUNCTION = "load_checkpoint" + + CATEGORY = "loaders" + + def load_checkpoint(self, ckpt_name, output_vae=True, output_clip=True): + ckpt_path = ldm_patched.utils.path_utils.get_full_path("checkpoints", ckpt_name) + out = ldm_patched.modules.sd.load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=True, output_clipvision=True, embedding_directory=ldm_patched.utils.path_utils.get_folder_paths("embeddings")) + return out + +class CLIPSetLastLayer: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip": ("CLIP", ), + "stop_at_clip_layer": ("INT", {"default": -1, "min": -24, "max": -1, "step": 1}), + }} + RETURN_TYPES = ("CLIP",) + FUNCTION = "set_last_layer" + + CATEGORY = "conditioning" + + def set_last_layer(self, clip, stop_at_clip_layer): + clip = clip.clone() + clip.clip_layer(stop_at_clip_layer) + return (clip,) + +class LoraLoader: + def __init__(self): + self.loaded_lora = None + + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "clip": ("CLIP", ), + "lora_name": (ldm_patched.utils.path_utils.get_filename_list("loras"), ), + "strength_model": ("FLOAT", {"default": 1.0, "min": -20.0, "max": 20.0, "step": 0.01}), + "strength_clip": ("FLOAT", {"default": 1.0, "min": -20.0, "max": 20.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL", "CLIP") + FUNCTION = "load_lora" + + CATEGORY = "loaders" + + def load_lora(self, model, clip, lora_name, strength_model, strength_clip): + if strength_model == 0 and strength_clip == 0: + return (model, clip) + + lora_path = ldm_patched.utils.path_utils.get_full_path("loras", lora_name) + lora = None + if self.loaded_lora is not None: + if self.loaded_lora[0] == lora_path: + lora = self.loaded_lora[1] + else: + temp = self.loaded_lora + self.loaded_lora = None + del temp + + if lora is None: + lora = ldm_patched.modules.utils.load_torch_file(lora_path, safe_load=True) + self.loaded_lora = (lora_path, lora) + + model_lora, clip_lora = ldm_patched.modules.sd.load_lora_for_models(model, clip, lora, strength_model, strength_clip) + return (model_lora, clip_lora) + +class LoraLoaderModelOnly(LoraLoader): + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "lora_name": (ldm_patched.utils.path_utils.get_filename_list("loras"), ), + "strength_model": ("FLOAT", {"default": 1.0, "min": -20.0, "max": 20.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "load_lora_model_only" + + def load_lora_model_only(self, model, lora_name, strength_model): + return (self.load_lora(model, None, lora_name, strength_model, 0)[0],) + +class VAELoader: + @staticmethod + def vae_list(): + vaes = ldm_patched.utils.path_utils.get_filename_list("vae") + approx_vaes = ldm_patched.utils.path_utils.get_filename_list("vae_approx") + sdxl_taesd_enc = False + sdxl_taesd_dec = False + sd1_taesd_enc = False + sd1_taesd_dec = False + + for v in approx_vaes: + if v.startswith("taesd_decoder."): + sd1_taesd_dec = True + elif 
v.startswith("taesd_encoder."): + sd1_taesd_enc = True + elif v.startswith("taesdxl_decoder."): + sdxl_taesd_dec = True + elif v.startswith("taesdxl_encoder."): + sdxl_taesd_enc = True + if sd1_taesd_dec and sd1_taesd_enc: + vaes.append("taesd") + if sdxl_taesd_dec and sdxl_taesd_enc: + vaes.append("taesdxl") + return vaes + + @staticmethod + def load_taesd(name): + sd = {} + approx_vaes = ldm_patched.utils.path_utils.get_filename_list("vae_approx") + + encoder = next(filter(lambda a: a.startswith("{}_encoder.".format(name)), approx_vaes)) + decoder = next(filter(lambda a: a.startswith("{}_decoder.".format(name)), approx_vaes)) + + enc = ldm_patched.modules.utils.load_torch_file(ldm_patched.utils.path_utils.get_full_path("vae_approx", encoder)) + for k in enc: + sd["taesd_encoder.{}".format(k)] = enc[k] + + dec = ldm_patched.modules.utils.load_torch_file(ldm_patched.utils.path_utils.get_full_path("vae_approx", decoder)) + for k in dec: + sd["taesd_decoder.{}".format(k)] = dec[k] + + if name == "taesd": + sd["vae_scale"] = torch.tensor(0.18215) + elif name == "taesdxl": + sd["vae_scale"] = torch.tensor(0.13025) + return sd + + @classmethod + def INPUT_TYPES(s): + return {"required": { "vae_name": (s.vae_list(), )}} + RETURN_TYPES = ("VAE",) + FUNCTION = "load_vae" + + CATEGORY = "loaders" + + #TODO: scale factor? + def load_vae(self, vae_name): + if vae_name in ["taesd", "taesdxl"]: + sd = self.load_taesd(vae_name) + else: + vae_path = ldm_patched.utils.path_utils.get_full_path("vae", vae_name) + sd = ldm_patched.modules.utils.load_torch_file(vae_path) + vae = ldm_patched.modules.sd.VAE(sd=sd) + return (vae,) + +class ControlNetLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "control_net_name": (ldm_patched.utils.path_utils.get_filename_list("controlnet"), )}} + + RETURN_TYPES = ("CONTROL_NET",) + FUNCTION = "load_controlnet" + + CATEGORY = "loaders" + + def load_controlnet(self, control_net_name): + controlnet_path = ldm_patched.utils.path_utils.get_full_path("controlnet", control_net_name) + controlnet = ldm_patched.modules.controlnet.load_controlnet(controlnet_path) + return (controlnet,) + +class DiffControlNetLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "control_net_name": (ldm_patched.utils.path_utils.get_filename_list("controlnet"), )}} + + RETURN_TYPES = ("CONTROL_NET",) + FUNCTION = "load_controlnet" + + CATEGORY = "loaders" + + def load_controlnet(self, model, control_net_name): + controlnet_path = ldm_patched.utils.path_utils.get_full_path("controlnet", control_net_name) + controlnet = ldm_patched.modules.controlnet.load_controlnet(controlnet_path, model) + return (controlnet,) + + +class ControlNetApply: + @classmethod + def INPUT_TYPES(s): + return {"required": {"conditioning": ("CONDITIONING", ), + "control_net": ("CONTROL_NET", ), + "image": ("IMAGE", ), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}) + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "apply_controlnet" + + CATEGORY = "conditioning" + + def apply_controlnet(self, conditioning, control_net, image, strength): + if strength == 0: + return (conditioning, ) + + c = [] + control_hint = image.movedim(-1,1) + for t in conditioning: + n = [t[0], t[1].copy()] + c_net = control_net.copy().set_cond_hint(control_hint, strength) + if 'control' in t[1]: + c_net.set_previous_controlnet(t[1]['control']) + n[1]['control'] = c_net + n[1]['control_apply_to_uncond'] = True + c.append(n) + return (c, ) + + +class 
ControlNetApplyAdvanced: + @classmethod + def INPUT_TYPES(s): + return {"required": {"positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "control_net": ("CONTROL_NET", ), + "image": ("IMAGE", ), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_percent": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}) + }} + + RETURN_TYPES = ("CONDITIONING","CONDITIONING") + RETURN_NAMES = ("positive", "negative") + FUNCTION = "apply_controlnet" + + CATEGORY = "conditioning" + + def apply_controlnet(self, positive, negative, control_net, image, strength, start_percent, end_percent): + if strength == 0: + return (positive, negative) + + control_hint = image.movedim(-1,1) + cnets = {} + + out = [] + for conditioning in [positive, negative]: + c = [] + for t in conditioning: + d = t[1].copy() + + prev_cnet = d.get('control', None) + if prev_cnet in cnets: + c_net = cnets[prev_cnet] + else: + c_net = control_net.copy().set_cond_hint(control_hint, strength, (start_percent, end_percent)) + c_net.set_previous_controlnet(prev_cnet) + cnets[prev_cnet] = c_net + + d['control'] = c_net + d['control_apply_to_uncond'] = False + n = [t[0], d] + c.append(n) + out.append(c) + return (out[0], out[1]) + + +class UNETLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "unet_name": (ldm_patched.utils.path_utils.get_filename_list("unet"), ), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "load_unet" + + CATEGORY = "advanced/loaders" + + def load_unet(self, unet_name): + unet_path = ldm_patched.utils.path_utils.get_full_path("unet", unet_name) + model = ldm_patched.modules.sd.load_unet(unet_path) + return (model,) + +class CLIPLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip_name": (ldm_patched.utils.path_utils.get_filename_list("clip"), ), + }} + RETURN_TYPES = ("CLIP",) + FUNCTION = "load_clip" + + CATEGORY = "advanced/loaders" + + def load_clip(self, clip_name): + clip_path = ldm_patched.utils.path_utils.get_full_path("clip", clip_name) + clip = ldm_patched.modules.sd.load_clip(ckpt_paths=[clip_path], embedding_directory=ldm_patched.utils.path_utils.get_folder_paths("embeddings")) + return (clip,) + +class DualCLIPLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip_name1": (ldm_patched.utils.path_utils.get_filename_list("clip"), ), "clip_name2": (ldm_patched.utils.path_utils.get_filename_list("clip"), ), + }} + RETURN_TYPES = ("CLIP",) + FUNCTION = "load_clip" + + CATEGORY = "advanced/loaders" + + def load_clip(self, clip_name1, clip_name2): + clip_path1 = ldm_patched.utils.path_utils.get_full_path("clip", clip_name1) + clip_path2 = ldm_patched.utils.path_utils.get_full_path("clip", clip_name2) + clip = ldm_patched.modules.sd.load_clip(ckpt_paths=[clip_path1, clip_path2], embedding_directory=ldm_patched.utils.path_utils.get_folder_paths("embeddings")) + return (clip,) + +class CLIPVisionLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip_name": (ldm_patched.utils.path_utils.get_filename_list("clip_vision"), ), + }} + RETURN_TYPES = ("CLIP_VISION",) + FUNCTION = "load_clip" + + CATEGORY = "loaders" + + def load_clip(self, clip_name): + clip_path = ldm_patched.utils.path_utils.get_full_path("clip_vision", clip_name) + clip_vision = ldm_patched.modules.clip_vision.load(clip_path) + return (clip_vision,) + +class CLIPVisionEncode: + @classmethod + def INPUT_TYPES(s): + return 
{"required": { "clip_vision": ("CLIP_VISION",), + "image": ("IMAGE",) + }} + RETURN_TYPES = ("CLIP_VISION_OUTPUT",) + FUNCTION = "encode" + + CATEGORY = "conditioning" + + def encode(self, clip_vision, image): + output = clip_vision.encode_image(image) + return (output,) + +class StyleModelLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "style_model_name": (ldm_patched.utils.path_utils.get_filename_list("style_models"), )}} + + RETURN_TYPES = ("STYLE_MODEL",) + FUNCTION = "load_style_model" + + CATEGORY = "loaders" + + def load_style_model(self, style_model_name): + style_model_path = ldm_patched.utils.path_utils.get_full_path("style_models", style_model_name) + style_model = ldm_patched.modules.sd.load_style_model(style_model_path) + return (style_model,) + + +class StyleModelApply: + @classmethod + def INPUT_TYPES(s): + return {"required": {"conditioning": ("CONDITIONING", ), + "style_model": ("STYLE_MODEL", ), + "clip_vision_output": ("CLIP_VISION_OUTPUT", ), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "apply_stylemodel" + + CATEGORY = "conditioning/style_model" + + def apply_stylemodel(self, clip_vision_output, style_model, conditioning): + cond = style_model.get_cond(clip_vision_output).flatten(start_dim=0, end_dim=1).unsqueeze(dim=0) + c = [] + for t in conditioning: + n = [torch.cat((t[0], cond), dim=1), t[1].copy()] + c.append(n) + return (c, ) + +class unCLIPConditioning: + @classmethod + def INPUT_TYPES(s): + return {"required": {"conditioning": ("CONDITIONING", ), + "clip_vision_output": ("CLIP_VISION_OUTPUT", ), + "strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + "noise_augmentation": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.01}), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "apply_adm" + + CATEGORY = "conditioning" + + def apply_adm(self, conditioning, clip_vision_output, strength, noise_augmentation): + if strength == 0: + return (conditioning, ) + + c = [] + for t in conditioning: + o = t[1].copy() + x = {"clip_vision_output": clip_vision_output, "strength": strength, "noise_augmentation": noise_augmentation} + if "unclip_conditioning" in o: + o["unclip_conditioning"] = o["unclip_conditioning"][:] + [x] + else: + o["unclip_conditioning"] = [x] + n = [t[0], o] + c.append(n) + return (c, ) + +class GLIGENLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "gligen_name": (ldm_patched.utils.path_utils.get_filename_list("gligen"), )}} + + RETURN_TYPES = ("GLIGEN",) + FUNCTION = "load_gligen" + + CATEGORY = "loaders" + + def load_gligen(self, gligen_name): + gligen_path = ldm_patched.utils.path_utils.get_full_path("gligen", gligen_name) + gligen = ldm_patched.modules.sd.load_gligen(gligen_path) + return (gligen,) + +class GLIGENTextBoxApply: + @classmethod + def INPUT_TYPES(s): + return {"required": {"conditioning_to": ("CONDITIONING", ), + "clip": ("CLIP", ), + "gligen_textbox_model": ("GLIGEN", ), + "text": ("STRING", {"multiline": True}), + "width": ("INT", {"default": 64, "min": 8, "max": MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 64, "min": 8, "max": MAX_RESOLUTION, "step": 8}), + "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "append" + + CATEGORY = "conditioning/gligen" + + def append(self, conditioning_to, clip, gligen_textbox_model, text, width, height, x, y): + c = [] + cond, cond_pooled = 
clip.encode_from_tokens(clip.tokenize(text), return_pooled=True) + for t in conditioning_to: + n = [t[0], t[1].copy()] + position_params = [(cond_pooled, height // 8, width // 8, y // 8, x // 8)] + prev = [] + if "gligen" in n[1]: + prev = n[1]['gligen'][2] + + n[1]['gligen'] = ("position", gligen_textbox_model, prev + position_params) + c.append(n) + return (c, ) + +class EmptyLatentImage: + def __init__(self): + self.device = ldm_patched.modules.model_management.intermediate_device() + + @classmethod + def INPUT_TYPES(s): + return {"required": { "width": ("INT", {"default": 512, "min": 16, "max": MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 512, "min": 16, "max": MAX_RESOLUTION, "step": 8}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096})}} + RETURN_TYPES = ("LATENT",) + FUNCTION = "generate" + + CATEGORY = "latent" + + def generate(self, width, height, batch_size=1): + latent = torch.zeros([batch_size, 4, height // 8, width // 8], device=self.device) + return ({"samples":latent}, ) + + +class LatentFromBatch: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT",), + "batch_index": ("INT", {"default": 0, "min": 0, "max": 63}), + "length": ("INT", {"default": 1, "min": 1, "max": 64}), + }} + RETURN_TYPES = ("LATENT",) + FUNCTION = "frombatch" + + CATEGORY = "latent/batch" + + def frombatch(self, samples, batch_index, length): + s = samples.copy() + s_in = samples["samples"] + batch_index = min(s_in.shape[0] - 1, batch_index) + length = min(s_in.shape[0] - batch_index, length) + s["samples"] = s_in[batch_index:batch_index + length].clone() + if "noise_mask" in samples: + masks = samples["noise_mask"] + if masks.shape[0] == 1: + s["noise_mask"] = masks.clone() + else: + if masks.shape[0] < s_in.shape[0]: + masks = masks.repeat(math.ceil(s_in.shape[0] / masks.shape[0]), 1, 1, 1)[:s_in.shape[0]] + s["noise_mask"] = masks[batch_index:batch_index + length].clone() + if "batch_index" not in s: + s["batch_index"] = [x for x in range(batch_index, batch_index+length)] + else: + s["batch_index"] = samples["batch_index"][batch_index:batch_index + length] + return (s,) + +class RepeatLatentBatch: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT",), + "amount": ("INT", {"default": 1, "min": 1, "max": 64}), + }} + RETURN_TYPES = ("LATENT",) + FUNCTION = "repeat" + + CATEGORY = "latent/batch" + + def repeat(self, samples, amount): + s = samples.copy() + s_in = samples["samples"] + + s["samples"] = s_in.repeat((amount, 1,1,1)) + if "noise_mask" in samples and samples["noise_mask"].shape[0] > 1: + masks = samples["noise_mask"] + if masks.shape[0] < s_in.shape[0]: + masks = masks.repeat(math.ceil(s_in.shape[0] / masks.shape[0]), 1, 1, 1)[:s_in.shape[0]] + s["noise_mask"] = samples["noise_mask"].repeat((amount, 1,1,1)) + if "batch_index" in s: + offset = max(s["batch_index"]) - min(s["batch_index"]) + 1 + s["batch_index"] = s["batch_index"] + [x + (i * offset) for i in range(1, amount) for x in s["batch_index"]] + return (s,) + +class LatentUpscale: + upscale_methods = ["nearest-exact", "bilinear", "area", "bicubic", "bislerp"] + crop_methods = ["disabled", "center"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT",), "upscale_method": (s.upscale_methods,), + "width": ("INT", {"default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "crop": (s.crop_methods,)}} + RETURN_TYPES = ("LATENT",) + 
FUNCTION = "upscale" + + CATEGORY = "latent" + + def upscale(self, samples, upscale_method, width, height, crop): + if width == 0 and height == 0: + s = samples + else: + s = samples.copy() + + if width == 0: + height = max(64, height) + width = max(64, round(samples["samples"].shape[3] * height / samples["samples"].shape[2])) + elif height == 0: + width = max(64, width) + height = max(64, round(samples["samples"].shape[2] * width / samples["samples"].shape[3])) + else: + width = max(64, width) + height = max(64, height) + + s["samples"] = ldm_patched.modules.utils.common_upscale(samples["samples"], width // 8, height // 8, upscale_method, crop) + return (s,) + +class LatentUpscaleBy: + upscale_methods = ["nearest-exact", "bilinear", "area", "bicubic", "bislerp"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT",), "upscale_method": (s.upscale_methods,), + "scale_by": ("FLOAT", {"default": 1.5, "min": 0.01, "max": 8.0, "step": 0.01}),}} + RETURN_TYPES = ("LATENT",) + FUNCTION = "upscale" + + CATEGORY = "latent" + + def upscale(self, samples, upscale_method, scale_by): + s = samples.copy() + width = round(samples["samples"].shape[3] * scale_by) + height = round(samples["samples"].shape[2] * scale_by) + s["samples"] = ldm_patched.modules.utils.common_upscale(samples["samples"], width, height, upscale_method, "disabled") + return (s,) + +class LatentRotate: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT",), + "rotation": (["none", "90 degrees", "180 degrees", "270 degrees"],), + }} + RETURN_TYPES = ("LATENT",) + FUNCTION = "rotate" + + CATEGORY = "latent/transform" + + def rotate(self, samples, rotation): + s = samples.copy() + rotate_by = 0 + if rotation.startswith("90"): + rotate_by = 1 + elif rotation.startswith("180"): + rotate_by = 2 + elif rotation.startswith("270"): + rotate_by = 3 + + s["samples"] = torch.rot90(samples["samples"], k=rotate_by, dims=[3, 2]) + return (s,) + +class LatentFlip: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT",), + "flip_method": (["x-axis: vertically", "y-axis: horizontally"],), + }} + RETURN_TYPES = ("LATENT",) + FUNCTION = "flip" + + CATEGORY = "latent/transform" + + def flip(self, samples, flip_method): + s = samples.copy() + if flip_method.startswith("x"): + s["samples"] = torch.flip(samples["samples"], dims=[2]) + elif flip_method.startswith("y"): + s["samples"] = torch.flip(samples["samples"], dims=[3]) + + return (s,) + +class LatentComposite: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples_to": ("LATENT",), + "samples_from": ("LATENT",), + "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "feather": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + }} + RETURN_TYPES = ("LATENT",) + FUNCTION = "composite" + + CATEGORY = "latent" + + def composite(self, samples_to, samples_from, x, y, composite_method="normal", feather=0): + x = x // 8 + y = y // 8 + feather = feather // 8 + samples_out = samples_to.copy() + s = samples_to["samples"].clone() + samples_to = samples_to["samples"] + samples_from = samples_from["samples"] + if feather == 0: + s[:,:,y:y+samples_from.shape[2],x:x+samples_from.shape[3]] = samples_from[:,:,:samples_to.shape[2] - y, :samples_to.shape[3] - x] + else: + samples_from = samples_from[:,:,:samples_to.shape[2] - y, :samples_to.shape[3] - x] + mask = torch.ones_like(samples_from) + for t in 
range(feather):
+                if y != 0:
+                    mask[:,:,t:1+t,:] *= ((1.0/feather) * (t + 1))
+
+                if y + samples_from.shape[2] < samples_to.shape[2]:
+                    mask[:,:,mask.shape[2] -1 -t: mask.shape[2]-t,:] *= ((1.0/feather) * (t + 1))
+                if x != 0:
+                    mask[:,:,:,t:1+t] *= ((1.0/feather) * (t + 1))
+                if x + samples_from.shape[3] < samples_to.shape[3]:
+                    mask[:,:,:,mask.shape[3]- 1 - t: mask.shape[3]- t] *= ((1.0/feather) * (t + 1))
+            rev_mask = torch.ones_like(mask) - mask
+            s[:,:,y:y+samples_from.shape[2],x:x+samples_from.shape[3]] = samples_from[:,:,:samples_to.shape[2] - y, :samples_to.shape[3] - x] * mask + s[:,:,y:y+samples_from.shape[2],x:x+samples_from.shape[3]] * rev_mask
+        samples_out["samples"] = s
+        return (samples_out,)
+
+class LatentBlend:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+            "samples1": ("LATENT",),
+            "samples2": ("LATENT",),
+            "blend_factor": ("FLOAT", {
+                "default": 0.5,
+                "min": 0,
+                "max": 1,
+                "step": 0.01
+            }),
+        }}
+
+    RETURN_TYPES = ("LATENT",)
+    FUNCTION = "blend"
+
+    CATEGORY = "_for_testing"
+
+    def blend(self, samples1, samples2, blend_factor: float, blend_mode: str = "normal"):
+        samples_out = samples1.copy()
+        samples1 = samples1["samples"]
+        samples2 = samples2["samples"]
+
+        if samples1.shape != samples2.shape:
+            # Latents are already NCHW, so resize samples2 directly to
+            # samples1's spatial size before blending.
+            samples2 = ldm_patched.modules.utils.common_upscale(samples2, samples1.shape[3], samples1.shape[2], 'bicubic', crop='center')
+
+        samples_blended = self.blend_mode(samples1, samples2, blend_mode)
+        samples_blended = samples1 * blend_factor + samples_blended * (1 - blend_factor)
+        samples_out["samples"] = samples_blended
+        return (samples_out,)
+
+    def blend_mode(self, img1, img2, mode):
+        if mode == "normal":
+            return img2
+        else:
+            raise ValueError(f"Unsupported blend mode: {mode}")
+
+class LatentCrop:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": { "samples": ("LATENT",),
+                              "width": ("INT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8}),
+                              "height": ("INT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8}),
+                              "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}),
+                              "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}),
+                              }}
+    RETURN_TYPES = ("LATENT",)
+    FUNCTION = "crop"
+
+    CATEGORY = "latent/transform"
+
+    def crop(self, samples, width, height, x, y):
+        s = samples.copy()
+        samples = samples['samples']
+        x = x // 8
+        y = y // 8
+
+        # enforce minimum size of 64
+        if x > (samples.shape[3] - 8):
+            x = samples.shape[3] - 8
+        if y > (samples.shape[2] - 8):
+            y = samples.shape[2] - 8
+
+        new_height = height // 8
+        new_width = width // 8
+        to_x = new_width + x
+        to_y = new_height + y
+        s['samples'] = samples[:,:,y:to_y, x:to_x]
+        return (s,)
+
+class SetLatentNoiseMask:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": { "samples": ("LATENT",),
+                              "mask": ("MASK",),
+                              }}
+    RETURN_TYPES = ("LATENT",)
+    FUNCTION = "set_mask"
+
+    CATEGORY = "latent/inpaint"
+
+    def set_mask(self, samples, mask):
+        s = samples.copy()
+        s["noise_mask"] = mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1]))
+        return (s,)
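Next comes `common_ksampler`, which forwards `latent['batch_index']` into noise preparation. The reason that list exists: noise is drawn from a single seeded generator in index order, so a latent sliced out of a batch (e.g. by `LatentFromBatch`) reproduces exactly the image it would have produced inside the full batch. A hedged sketch of that idea, simplified from what `ldm_patched.modules.sample.prepare_noise` does (the upstream implementation may differ in its details):

```python
# Hedged sketch of per-index noise preparation (simplified; the real
# prepare_noise in ldm_patched.modules.sample may differ in details).
import torch

def prepare_noise_sketch(latent_image, seed, noise_inds=None):
    generator = torch.manual_seed(seed)
    if noise_inds is None:
        return torch.randn(latent_image.size(), dtype=latent_image.dtype,
                           layout=latent_image.layout, generator=generator,
                           device="cpu")
    wanted = set(noise_inds)
    slices = {}
    for i in range(max(noise_inds) + 1):
        # Advance the generator for every index so its state matches a
        # full-batch run; keep only the requested slices.
        noise = torch.randn([1] + list(latent_image.size())[1:],
                            dtype=latent_image.dtype, layout=latent_image.layout,
                            generator=generator, device="cpu")
        if i in wanted:
            slices[i] = noise
    return torch.cat([slices[i] for i in noise_inds], dim=0)
```

+def common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent, denoise=1.0, disable_noise=False, start_step=None, last_step=None, force_full_denoise=False):
+    latent_image = latent["samples"]
+    if disable_noise:
+        noise = torch.zeros(latent_image.size(), dtype=latent_image.dtype, layout=latent_image.layout, device="cpu")
+    else:
+        batch_inds = latent["batch_index"] if 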
"batch_index" in latent else None + noise = ldm_patched.modules.sample.prepare_noise(latent_image, seed, batch_inds) + + noise_mask = None + if "noise_mask" in latent: + noise_mask = latent["noise_mask"] + + callback = ldm_patched.utils.latent_visualization.prepare_callback(model, steps) + disable_pbar = not ldm_patched.modules.utils.PROGRESS_BAR_ENABLED + samples = ldm_patched.modules.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, + denoise=denoise, disable_noise=disable_noise, start_step=start_step, last_step=last_step, + force_full_denoise=force_full_denoise, noise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed) + out = latent.copy() + out["samples"] = samples + return (out, ) + +class KSampler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), + "sampler_name": (ldm_patched.modules.samplers.KSampler.SAMPLERS, ), + "scheduler": (ldm_patched.modules.samplers.KSampler.SCHEDULERS, ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "latent_image": ("LATENT", ), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + } + } + + RETURN_TYPES = ("LATENT",) + FUNCTION = "sample" + + CATEGORY = "sampling" + + def sample(self, model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=1.0): + return common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=denoise) + +class KSamplerAdvanced: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "add_noise": (["enable", "disable"], ), + "noise_seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), + "sampler_name": (ldm_patched.modules.samplers.KSampler.SAMPLERS, ), + "scheduler": (ldm_patched.modules.samplers.KSampler.SCHEDULERS, ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "latent_image": ("LATENT", ), + "start_at_step": ("INT", {"default": 0, "min": 0, "max": 10000}), + "end_at_step": ("INT", {"default": 10000, "min": 0, "max": 10000}), + "return_with_leftover_noise": (["disable", "enable"], ), + } + } + + RETURN_TYPES = ("LATENT",) + FUNCTION = "sample" + + CATEGORY = "sampling" + + def sample(self, model, add_noise, noise_seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, start_at_step, end_at_step, return_with_leftover_noise, denoise=1.0): + force_full_denoise = True + if return_with_leftover_noise == "enable": + force_full_denoise = False + disable_noise = False + if add_noise == "disable": + disable_noise = True + return common_ksampler(model, noise_seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=denoise, disable_noise=disable_noise, start_step=start_at_step, last_step=end_at_step, force_full_denoise=force_full_denoise) + +class SaveImage: + def __init__(self): + self.output_dir = ldm_patched.utils.path_utils.get_output_directory() + self.type = "output" + self.prefix_append = "" + self.compress_level = 4 + + @classmethod + def INPUT_TYPES(s): + return {"required": + {"images": ("IMAGE", ), + 
"filename_prefix": ("STRING", {"default": "ldm_patched"})}, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + RETURN_TYPES = () + FUNCTION = "save_images" + + OUTPUT_NODE = True + + CATEGORY = "image" + + def save_images(self, images, filename_prefix="ldm_patched", prompt=None, extra_pnginfo=None): + filename_prefix += self.prefix_append + full_output_folder, filename, counter, subfolder, filename_prefix = ldm_patched.utils.path_utils.get_save_image_path(filename_prefix, self.output_dir, images[0].shape[1], images[0].shape[0]) + results = list() + for image in images: + i = 255. * image.cpu().numpy() + img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)) + metadata = None + if not args.disable_server_info: + metadata = PngInfo() + if prompt is not None: + metadata.add_text("prompt", json.dumps(prompt)) + if extra_pnginfo is not None: + for x in extra_pnginfo: + metadata.add_text(x, json.dumps(extra_pnginfo[x])) + + file = f"{filename}_{counter:05}_.png" + img.save(os.path.join(full_output_folder, file), pnginfo=metadata, compress_level=self.compress_level) + results.append({ + "filename": file, + "subfolder": subfolder, + "type": self.type + }) + counter += 1 + + return { "ui": { "images": results } } + +class PreviewImage(SaveImage): + def __init__(self): + self.output_dir = ldm_patched.utils.path_utils.get_temp_directory() + self.type = "temp" + self.prefix_append = "_temp_" + ''.join(random.choice("abcdefghijklmnopqrstupvxyz") for x in range(5)) + self.compress_level = 1 + + @classmethod + def INPUT_TYPES(s): + return {"required": + {"images": ("IMAGE", ), }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + +class LoadImage: + @classmethod + def INPUT_TYPES(s): + input_dir = ldm_patched.utils.path_utils.get_input_directory() + files = [f for f in os.listdir(input_dir) if os.path.isfile(os.path.join(input_dir, f))] + return {"required": + {"image": (sorted(files), {"image_upload": True})}, + } + + CATEGORY = "image" + + RETURN_TYPES = ("IMAGE", "MASK") + FUNCTION = "load_image" + def load_image(self, image): + image_path = ldm_patched.utils.path_utils.get_annotated_filepath(image) + img = Image.open(image_path) + output_images = [] + output_masks = [] + for i in ImageSequence.Iterator(img): + i = ImageOps.exif_transpose(i) + if i.mode == 'I': + i = i.point(lambda i: i * (1 / 255)) + image = i.convert("RGB") + image = np.array(image).astype(np.float32) / 255.0 + image = torch.from_numpy(image)[None,] + if 'A' in i.getbands(): + mask = np.array(i.getchannel('A')).astype(np.float32) / 255.0 + mask = 1. 
- torch.from_numpy(mask) + else: + mask = torch.zeros((64,64), dtype=torch.float32, device="cpu") + output_images.append(image) + output_masks.append(mask.unsqueeze(0)) + + if len(output_images) > 1: + output_image = torch.cat(output_images, dim=0) + output_mask = torch.cat(output_masks, dim=0) + else: + output_image = output_images[0] + output_mask = output_masks[0] + + return (output_image, output_mask) + + @classmethod + def IS_CHANGED(s, image): + image_path = ldm_patched.utils.path_utils.get_annotated_filepath(image) + m = hashlib.sha256() + with open(image_path, 'rb') as f: + m.update(f.read()) + return m.digest().hex() + + @classmethod + def VALIDATE_INPUTS(s, image): + if not ldm_patched.utils.path_utils.exists_annotated_filepath(image): + return "Invalid image file: {}".format(image) + + return True + +class LoadImageMask: + _color_channels = ["alpha", "red", "green", "blue"] + @classmethod + def INPUT_TYPES(s): + input_dir = ldm_patched.utils.path_utils.get_input_directory() + files = [f for f in os.listdir(input_dir) if os.path.isfile(os.path.join(input_dir, f))] + return {"required": + {"image": (sorted(files), {"image_upload": True}), + "channel": (s._color_channels, ), } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + FUNCTION = "load_image" + def load_image(self, image, channel): + image_path = ldm_patched.utils.path_utils.get_annotated_filepath(image) + i = Image.open(image_path) + i = ImageOps.exif_transpose(i) + if i.getbands() != ("R", "G", "B", "A"): + if i.mode == 'I': + i = i.point(lambda i: i * (1 / 255)) + i = i.convert("RGBA") + mask = None + c = channel[0].upper() + if c in i.getbands(): + mask = np.array(i.getchannel(c)).astype(np.float32) / 255.0 + mask = torch.from_numpy(mask) + if c == 'A': + mask = 1. - mask + else: + mask = torch.zeros((64,64), dtype=torch.float32, device="cpu") + return (mask.unsqueeze(0),) + + @classmethod + def IS_CHANGED(s, image, channel): + image_path = ldm_patched.utils.path_utils.get_annotated_filepath(image) + m = hashlib.sha256() + with open(image_path, 'rb') as f: + m.update(f.read()) + return m.digest().hex() + + @classmethod + def VALIDATE_INPUTS(s, image): + if not ldm_patched.utils.path_utils.exists_annotated_filepath(image): + return "Invalid image file: {}".format(image) + + return True + +class ImageScale: + upscale_methods = ["nearest-exact", "bilinear", "area", "bicubic", "lanczos"] + crop_methods = ["disabled", "center"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { "image": ("IMAGE",), "upscale_method": (s.upscale_methods,), + "width": ("INT", {"default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "height": ("INT", {"default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "crop": (s.crop_methods,)}} + RETURN_TYPES = ("IMAGE",) + FUNCTION = "upscale" + + CATEGORY = "image/upscaling" + + def upscale(self, image, upscale_method, width, height, crop): + if width == 0 and height == 0: + s = image + else: + samples = image.movedim(-1,1) + + if width == 0: + width = max(1, round(samples.shape[3] * height / samples.shape[2])) + elif height == 0: + height = max(1, round(samples.shape[2] * width / samples.shape[3])) + + s = ldm_patched.modules.utils.common_upscale(samples, width, height, upscale_method, crop) + s = s.movedim(1,-1) + return (s,) + +class ImageScaleBy: + upscale_methods = ["nearest-exact", "bilinear", "area", "bicubic", "lanczos"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { "image": ("IMAGE",), "upscale_method": (s.upscale_methods,), + "scale_by": 
("FLOAT", {"default": 1.0, "min": 0.01, "max": 8.0, "step": 0.01}),}} + RETURN_TYPES = ("IMAGE",) + FUNCTION = "upscale" + + CATEGORY = "image/upscaling" + + def upscale(self, image, upscale_method, scale_by): + samples = image.movedim(-1,1) + width = round(samples.shape[3] * scale_by) + height = round(samples.shape[2] * scale_by) + s = ldm_patched.modules.utils.common_upscale(samples, width, height, upscale_method, "disabled") + s = s.movedim(1,-1) + return (s,) + +class ImageInvert: + + @classmethod + def INPUT_TYPES(s): + return {"required": { "image": ("IMAGE",)}} + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "invert" + + CATEGORY = "image" + + def invert(self, image): + s = 1.0 - image + return (s,) + +class ImageBatch: + + @classmethod + def INPUT_TYPES(s): + return {"required": { "image1": ("IMAGE",), "image2": ("IMAGE",)}} + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "batch" + + CATEGORY = "image" + + def batch(self, image1, image2): + if image1.shape[1:] != image2.shape[1:]: + image2 = ldm_patched.modules.utils.common_upscale(image2.movedim(-1,1), image1.shape[2], image1.shape[1], "bilinear", "center").movedim(1,-1) + s = torch.cat((image1, image2), dim=0) + return (s,) + +class EmptyImage: + def __init__(self, device="cpu"): + self.device = device + + @classmethod + def INPUT_TYPES(s): + return {"required": { "width": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), + "height": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), + "color": ("INT", {"default": 0, "min": 0, "max": 0xFFFFFF, "step": 1, "display": "color"}), + }} + RETURN_TYPES = ("IMAGE",) + FUNCTION = "generate" + + CATEGORY = "image" + + def generate(self, width, height, batch_size=1, color=0): + r = torch.full([batch_size, height, width, 1], ((color >> 16) & 0xFF) / 0xFF) + g = torch.full([batch_size, height, width, 1], ((color >> 8) & 0xFF) / 0xFF) + b = torch.full([batch_size, height, width, 1], ((color) & 0xFF) / 0xFF) + return (torch.cat((r, g, b), dim=-1), ) + +class ImagePadForOutpaint: + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "left": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "top": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "right": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "bottom": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "feathering": ("INT", {"default": 40, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + } + } + + RETURN_TYPES = ("IMAGE", "MASK") + FUNCTION = "expand_image" + + CATEGORY = "image" + + def expand_image(self, image, left, top, right, bottom, feathering): + d1, d2, d3, d4 = image.size() + + new_image = torch.ones( + (d1, d2 + top + bottom, d3 + left + right, d4), + dtype=torch.float32, + ) * 0.5 + + new_image[:, top:top + d2, left:left + d3, :] = image + + mask = torch.ones( + (d2 + top + bottom, d3 + left + right), + dtype=torch.float32, + ) + + t = torch.zeros( + (d2, d3), + dtype=torch.float32 + ) + + if feathering > 0 and feathering * 2 < d2 and feathering * 2 < d3: + + for i in range(d2): + for j in range(d3): + dt = i if top != 0 else d2 + db = d2 - i if bottom != 0 else d2 + + dl = j if left != 0 else d3 + dr = d3 - j if right != 0 else d3 + + d = min(dt, db, dl, dr) + + if d >= feathering: + continue + + v = (feathering - d) / feathering + + t[i, j] = v * v + + mask[top:top + d2, left:left + d3] = t + + return 
(new_image, mask)
+
+
+NODE_CLASS_MAPPINGS = {
+    "KSampler": KSampler,
+    "CheckpointLoaderSimple": CheckpointLoaderSimple,
+    "CLIPTextEncode": CLIPTextEncode,
+    "CLIPSetLastLayer": CLIPSetLastLayer,
+    "VAEDecode": VAEDecode,
+    "VAEEncode": VAEEncode,
+    "VAEEncodeForInpaint": VAEEncodeForInpaint,
+    "VAELoader": VAELoader,
+    "EmptyLatentImage": EmptyLatentImage,
+    "LatentUpscale": LatentUpscale,
+    "LatentUpscaleBy": LatentUpscaleBy,
+    "LatentFromBatch": LatentFromBatch,
+    "RepeatLatentBatch": RepeatLatentBatch,
+    "SaveImage": SaveImage,
+    "PreviewImage": PreviewImage,
+    "LoadImage": LoadImage,
+    "LoadImageMask": LoadImageMask,
+    "ImageScale": ImageScale,
+    "ImageScaleBy": ImageScaleBy,
+    "ImageInvert": ImageInvert,
+    "ImageBatch": ImageBatch,
+    "ImagePadForOutpaint": ImagePadForOutpaint,
+    "EmptyImage": EmptyImage,
+    "ConditioningAverage": ConditioningAverage,
+    "ConditioningCombine": ConditioningCombine,
+    "ConditioningConcat": ConditioningConcat,
+    "ConditioningSetArea": ConditioningSetArea,
+    "ConditioningSetAreaPercentage": ConditioningSetAreaPercentage,
+    "ConditioningSetMask": ConditioningSetMask,
+    "KSamplerAdvanced": KSamplerAdvanced,
+    "SetLatentNoiseMask": SetLatentNoiseMask,
+    "LatentComposite": LatentComposite,
+    "LatentBlend": LatentBlend,
+    "LatentRotate": LatentRotate,
+    "LatentFlip": LatentFlip,
+    "LatentCrop": LatentCrop,
+    "LoraLoader": LoraLoader,
+    "CLIPLoader": CLIPLoader,
+    "UNETLoader": UNETLoader,
+    "DualCLIPLoader": DualCLIPLoader,
+    "CLIPVisionEncode": CLIPVisionEncode,
+    "StyleModelApply": StyleModelApply,
+    "unCLIPConditioning": unCLIPConditioning,
+    "ControlNetApply": ControlNetApply,
+    "ControlNetApplyAdvanced": ControlNetApplyAdvanced,
+    "ControlNetLoader": ControlNetLoader,
+    "DiffControlNetLoader": DiffControlNetLoader,
+    "StyleModelLoader": StyleModelLoader,
+    "CLIPVisionLoader": CLIPVisionLoader,
+    "VAEDecodeTiled": VAEDecodeTiled,
+    "VAEEncodeTiled": VAEEncodeTiled,
+    "unCLIPCheckpointLoader": unCLIPCheckpointLoader,
+    "GLIGENLoader": GLIGENLoader,
+    "GLIGENTextBoxApply": GLIGENTextBoxApply,
+    "InpaintModelConditioning": InpaintModelConditioning,
+
+    "CheckpointLoader": CheckpointLoader,
+    "DiffusersLoader": DiffusersLoader,
+
+    "LoadLatent": LoadLatent,
+    "SaveLatent": SaveLatent,
+
+    "ConditioningZeroOut": ConditioningZeroOut,
+    "ConditioningSetTimestepRange": ConditioningSetTimestepRange,
+    "LoraLoaderModelOnly": LoraLoaderModelOnly,
+}
+
+NODE_DISPLAY_NAME_MAPPINGS = {
+    # Sampling
+    "KSampler": "KSampler",
+    "KSamplerAdvanced": "KSampler (Advanced)",
+    # Loaders
+    "CheckpointLoader": "Load Checkpoint With Config (DEPRECATED)",
+    "CheckpointLoaderSimple": "Load Checkpoint",
+    "VAELoader": "Load VAE",
+    "LoraLoader": "Load LoRA",
+    "CLIPLoader": "Load CLIP",
+    "ControlNetLoader": "Load ControlNet Model",
+    "DiffControlNetLoader": "Load ControlNet Model (diff)",
+    "StyleModelLoader": "Load Style Model",
+    "CLIPVisionLoader": "Load CLIP Vision",
+    "UpscaleModelLoader": "Load Upscale Model",
+    # Conditioning
+    "CLIPVisionEncode": "CLIP Vision Encode",
+    "StyleModelApply": "Apply Style Model",
+    "CLIPTextEncode": "CLIP Text Encode (Prompt)",
+    "CLIPSetLastLayer": "CLIP Set Last Layer",
+    "ConditioningCombine": "Conditioning (Combine)",
+    "ConditioningAverage": "Conditioning (Average)",
+    "ConditioningConcat": "Conditioning (Concat)",
+    "ConditioningSetArea": "Conditioning (Set Area)",
+    "ConditioningSetAreaPercentage": "Conditioning (Set Area with Percentage)",
+    "ConditioningSetMask": "Conditioning (Set Mask)",
+    "ControlNetApply": "Apply ControlNet",
"ControlNetApplyAdvanced": "Apply ControlNet (Advanced)", + # Latent + "VAEEncodeForInpaint": "VAE Encode (for Inpainting)", + "SetLatentNoiseMask": "Set Latent Noise Mask", + "VAEDecode": "VAE Decode", + "VAEEncode": "VAE Encode", + "LatentRotate": "Rotate Latent", + "LatentFlip": "Flip Latent", + "LatentCrop": "Crop Latent", + "EmptyLatentImage": "Empty Latent Image", + "LatentUpscale": "Upscale Latent", + "LatentUpscaleBy": "Upscale Latent By", + "LatentComposite": "Latent Composite", + "LatentBlend": "Latent Blend", + "LatentFromBatch" : "Latent From Batch", + "RepeatLatentBatch": "Repeat Latent Batch", + # Image + "SaveImage": "Save Image", + "PreviewImage": "Preview Image", + "LoadImage": "Load Image", + "LoadImageMask": "Load Image (as Mask)", + "ImageScale": "Upscale Image", + "ImageScaleBy": "Upscale Image By", + "ImageUpscaleWithModel": "Upscale Image (using Model)", + "ImageInvert": "Invert Image", + "ImagePadForOutpaint": "Pad Image for Outpainting", + "ImageBatch": "Batch Images", + # _for_testing + "VAEDecodeTiled": "VAE Decode (Tiled)", + "VAEEncodeTiled": "VAE Encode (Tiled)", +} + +EXTENSION_WEB_DIRS = {} + +def load_custom_node(module_path, ignore=set()): + module_name = os.path.basename(module_path) + if os.path.isfile(module_path): + sp = os.path.splitext(module_path) + module_name = sp[0] + try: + if os.path.isfile(module_path): + module_spec = importlib.util.spec_from_file_location(module_name, module_path) + module_dir = os.path.split(module_path)[0] + else: + module_spec = importlib.util.spec_from_file_location(module_name, os.path.join(module_path, "__init__.py")) + module_dir = module_path + + module = importlib.util.module_from_spec(module_spec) + sys.modules[module_name] = module + module_spec.loader.exec_module(module) + + if hasattr(module, "WEB_DIRECTORY") and getattr(module, "WEB_DIRECTORY") is not None: + web_dir = os.path.abspath(os.path.join(module_dir, getattr(module, "WEB_DIRECTORY"))) + if os.path.isdir(web_dir): + EXTENSION_WEB_DIRS[module_name] = web_dir + + if hasattr(module, "NODE_CLASS_MAPPINGS") and getattr(module, "NODE_CLASS_MAPPINGS") is not None: + for name in module.NODE_CLASS_MAPPINGS: + if name not in ignore: + NODE_CLASS_MAPPINGS[name] = module.NODE_CLASS_MAPPINGS[name] + if hasattr(module, "NODE_DISPLAY_NAME_MAPPINGS") and getattr(module, "NODE_DISPLAY_NAME_MAPPINGS") is not None: + NODE_DISPLAY_NAME_MAPPINGS.update(module.NODE_DISPLAY_NAME_MAPPINGS) + return True + else: + print(f"Skip {module_path} module for custom nodes due to the lack of NODE_CLASS_MAPPINGS.") + return False + except Exception as e: + print(traceback.format_exc()) + print(f"Cannot import {module_path} module for custom nodes:", e) + return False + +def load_custom_nodes(): + base_node_names = set(NODE_CLASS_MAPPINGS.keys()) + node_paths = ldm_patched.utils.path_utils.get_folder_paths("custom_nodes") + node_import_times = [] + for custom_node_path in node_paths: + possible_modules = os.listdir(os.path.realpath(custom_node_path)) + if "__pycache__" in possible_modules: + possible_modules.remove("__pycache__") + + for possible_module in possible_modules: + module_path = os.path.join(custom_node_path, possible_module) + if os.path.isfile(module_path) and os.path.splitext(module_path)[1] != ".py": continue + if module_path.endswith(".disabled"): continue + time_before = time.perf_counter() + success = load_custom_node(module_path, base_node_names) + node_import_times.append((time.perf_counter() - time_before, module_path, success)) + + if len(node_import_times) > 0: + 
print("\nImport times for custom nodes:") + for n in sorted(node_import_times): + if n[2]: + import_message = "" + else: + import_message = " (IMPORT FAILED)" + print("{:6.1f} seconds{}:".format(n[0], import_message), n[1]) + print() + +def init_custom_nodes(): + extras_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "ldm_patched_extras") + extras_files = [ + "nodes_latent.py", + "nodes_hypernetwork.py", + "nodes_upscale_model.py", + "nodes_post_processing.py", + "nodes_mask.py", + "nodes_compositing.py", + "nodes_rebatch.py", + "nodes_model_merging.py", + "nodes_tomesd.py", + "nodes_clip_sdxl.py", + "nodes_canny.py", + "nodes_freelunch.py", + "nodes_custom_sampler.py", + "nodes_hypertile.py", + "nodes_model_advanced.py", + "nodes_model_downscale.py", + "nodes_images.py", + "nodes_video_model.py", + "nodes_sag.py", + "nodes_perpneg.py", + "nodes_stable3d.py", + "nodes_sdupscale.py", + "nodes_photomaker.py", + ] + + for node_file in extras_files: + load_custom_node(os.path.join(extras_dir, node_file)) + + load_custom_nodes() diff --git a/ldm_patched/contrib/external_canny.py b/ldm_patched/contrib/external_canny.py new file mode 100644 index 0000000000000000000000000000000000000000..7347ba1edcdf530720aa4c0689afe866391065e8 --- /dev/null +++ b/ldm_patched/contrib/external_canny.py @@ -0,0 +1,301 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +#From https://github.com/kornia/kornia +import math + +import torch +import torch.nn.functional as F +import ldm_patched.modules.model_management + +def get_canny_nms_kernel(device=None, dtype=None): + """Utility function that returns 3x3 kernels for the Canny Non-maximal suppression.""" + return torch.tensor( + [ + [[[0.0, 0.0, 0.0], [0.0, 1.0, -1.0], [0.0, 0.0, 0.0]]], + [[[0.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, -1.0]]], + [[[0.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, -1.0, 0.0]]], + [[[0.0, 0.0, 0.0], [0.0, 1.0, 0.0], [-1.0, 0.0, 0.0]]], + [[[0.0, 0.0, 0.0], [-1.0, 1.0, 0.0], [0.0, 0.0, 0.0]]], + [[[-1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 0.0]]], + [[[0.0, -1.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 0.0]]], + [[[0.0, 0.0, -1.0], [0.0, 1.0, 0.0], [0.0, 0.0, 0.0]]], + ], + device=device, + dtype=dtype, + ) + + +def get_hysteresis_kernel(device=None, dtype=None): + """Utility function that returns the 3x3 kernels for the Canny hysteresis.""" + return torch.tensor( + [ + [[[0.0, 0.0, 0.0], [0.0, 0.0, 1.0], [0.0, 0.0, 0.0]]], + [[[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 1.0]]], + [[[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 1.0, 0.0]]], + [[[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [1.0, 0.0, 0.0]]], + [[[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 0.0, 0.0]]], + [[[1.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]], + [[[0.0, 1.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]], + [[[0.0, 0.0, 1.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]], + ], + device=device, + dtype=dtype, + ) + +def gaussian_blur_2d(img, kernel_size, sigma): + ksize_half = (kernel_size - 1) * 0.5 + + x = torch.linspace(-ksize_half, ksize_half, steps=kernel_size) + + pdf = torch.exp(-0.5 * (x / sigma).pow(2)) + + x_kernel = pdf / pdf.sum() + x_kernel = x_kernel.to(device=img.device, dtype=img.dtype) + + kernel2d = torch.mm(x_kernel[:, None], x_kernel[None, :]) + kernel2d = kernel2d.expand(img.shape[-3], 1, kernel2d.shape[0], kernel2d.shape[1]) + + padding = [kernel_size // 2, kernel_size // 2, kernel_size // 2, kernel_size // 2] + + img = torch.nn.functional.pad(img, padding, mode="reflect") + img = torch.nn.functional.conv2d(img, kernel2d, groups=img.shape[-3]) + + 
return img + +def get_sobel_kernel2d(device=None, dtype=None): + kernel_x = torch.tensor([[-1.0, 0.0, 1.0], [-2.0, 0.0, 2.0], [-1.0, 0.0, 1.0]], device=device, dtype=dtype) + kernel_y = kernel_x.transpose(0, 1) + return torch.stack([kernel_x, kernel_y]) + +def spatial_gradient(input, normalized: bool = True): + r"""Compute the first order image derivative in both x and y using a Sobel operator. + .. image:: _static/img/spatial_gradient.png + Args: + input: input image tensor with shape :math:`(B, C, H, W)`. + mode: derivatives modality, can be: `sobel` or `diff`. + order: the order of the derivatives. + normalized: whether the output is normalized. + Return: + the derivatives of the input feature map. with shape :math:`(B, C, 2, H, W)`. + .. note:: + See a working example `here `__. + Examples: + >>> input = torch.rand(1, 3, 4, 4) + >>> output = spatial_gradient(input) # 1x3x2x4x4 + >>> output.shape + torch.Size([1, 3, 2, 4, 4]) + """ + # KORNIA_CHECK_IS_TENSOR(input) + # KORNIA_CHECK_SHAPE(input, ['B', 'C', 'H', 'W']) + + # allocate kernel + kernel = get_sobel_kernel2d(device=input.device, dtype=input.dtype) + if normalized: + kernel = normalize_kernel2d(kernel) + + # prepare kernel + b, c, h, w = input.shape + tmp_kernel = kernel[:, None, ...] + + # Pad with "replicate for spatial dims, but with zeros for channel + spatial_pad = [kernel.size(1) // 2, kernel.size(1) // 2, kernel.size(2) // 2, kernel.size(2) // 2] + out_channels: int = 2 + padded_inp = torch.nn.functional.pad(input.reshape(b * c, 1, h, w), spatial_pad, 'replicate') + out = F.conv2d(padded_inp, tmp_kernel, groups=1, padding=0, stride=1) + return out.reshape(b, c, out_channels, h, w) + +def rgb_to_grayscale(image, rgb_weights = None): + r"""Convert a RGB image to grayscale version of image. + + .. image:: _static/img/rgb_to_grayscale.png + + The image data is assumed to be in the range of (0, 1). + + Args: + image: RGB image to be converted to grayscale with shape :math:`(*,3,H,W)`. + rgb_weights: Weights that will be applied on each channel (RGB). + The sum of the weights should add up to one. + Returns: + grayscale version of the image with shape :math:`(*,1,H,W)`. + + .. note:: + See a working example `here `__. + + Example: + >>> input = torch.rand(2, 3, 4, 5) + >>> gray = rgb_to_grayscale(input) # 2x1x4x5 + """ + + if len(image.shape) < 3 or image.shape[-3] != 3: + raise ValueError(f"Input size must have a shape of (*, 3, H, W). Got {image.shape}") + + if rgb_weights is None: + # 8 bit images + if image.dtype == torch.uint8: + rgb_weights = torch.tensor([76, 150, 29], device=image.device, dtype=torch.uint8) + # floating point images + elif image.dtype in (torch.float16, torch.float32, torch.float64): + rgb_weights = torch.tensor([0.299, 0.587, 0.114], device=image.device, dtype=image.dtype) + else: + raise TypeError(f"Unknown data type: {image.dtype}") + else: + # is tensor that we make sure is in the same device/dtype + rgb_weights = rgb_weights.to(image) + + # unpack the color image channels with RGB order + r: Tensor = image[..., 0:1, :, :] + g: Tensor = image[..., 1:2, :, :] + b: Tensor = image[..., 2:3, :, :] + + w_r, w_g, w_b = rgb_weights.unbind() + return w_r * r + w_g * g + w_b * b + +def canny( + input, + low_threshold = 0.1, + high_threshold = 0.2, + kernel_size = 5, + sigma = 1, + hysteresis = True, + eps = 1e-6, +): + r"""Find edges of the input image and filters them using the Canny algorithm. + .. image:: _static/img/canny.png + Args: + input: input image tensor with shape :math:`(B,C,H,W)`. 
+ low_threshold: lower threshold for the hysteresis procedure. + high_threshold: upper threshold for the hysteresis procedure. + kernel_size: the size of the kernel for the gaussian blur. + sigma: the standard deviation of the kernel for the gaussian blur. + hysteresis: if True, applies the hysteresis edge tracking. + Otherwise, the edges are divided between weak (0.5) and strong (1) edges. + eps: regularization number to avoid NaN during backprop. + Returns: + - the canny edge magnitudes map, shape of :math:`(B,1,H,W)`. + - the canny edge detection filtered by thresholds and hysteresis, shape of :math:`(B,1,H,W)`. + .. note:: + See a working example `here `__. + Example: + >>> input = torch.rand(5, 3, 4, 4) + >>> magnitude, edges = canny(input) # 5x3x4x4 + >>> magnitude.shape + torch.Size([5, 1, 4, 4]) + >>> edges.shape + torch.Size([5, 1, 4, 4]) + """ + # KORNIA_CHECK_IS_TENSOR(input) + # KORNIA_CHECK_SHAPE(input, ['B', 'C', 'H', 'W']) + # KORNIA_CHECK( + # low_threshold <= high_threshold, + # "Invalid input thresholds. low_threshold should be smaller than the high_threshold. Got: " + # f"{low_threshold}>{high_threshold}", + # ) + # KORNIA_CHECK(0 < low_threshold < 1, f'Invalid low threshold. Should be in range (0, 1). Got: {low_threshold}') + # KORNIA_CHECK(0 < high_threshold < 1, f'Invalid high threshold. Should be in range (0, 1). Got: {high_threshold}') + + device = input.device + dtype = input.dtype + + # To Grayscale + if input.shape[1] == 3: + input = rgb_to_grayscale(input) + + # Gaussian filter + blurred: Tensor = gaussian_blur_2d(input, kernel_size, sigma) + + # Compute the gradients + gradients: Tensor = spatial_gradient(blurred, normalized=False) + + # Unpack the edges + gx: Tensor = gradients[:, :, 0] + gy: Tensor = gradients[:, :, 1] + + # Compute gradient magnitude and angle + magnitude: Tensor = torch.sqrt(gx * gx + gy * gy + eps) + angle: Tensor = torch.atan2(gy, gx) + + # Radians to Degrees + angle = 180.0 * angle / math.pi + + # Round angle to the nearest 45 degree + angle = torch.round(angle / 45) * 45 + + # Non-maximal suppression + nms_kernels: Tensor = get_canny_nms_kernel(device, dtype) + nms_magnitude: Tensor = F.conv2d(magnitude, nms_kernels, padding=nms_kernels.shape[-1] // 2) + + # Get the indices for both directions + positive_idx: Tensor = (angle / 45) % 8 + positive_idx = positive_idx.long() + + negative_idx: Tensor = ((angle / 45) + 4) % 8 + negative_idx = negative_idx.long() + + # Apply the non-maximum suppression to the different directions + channel_select_filtered_positive: Tensor = torch.gather(nms_magnitude, 1, positive_idx) + channel_select_filtered_negative: Tensor = torch.gather(nms_magnitude, 1, negative_idx) + + channel_select_filtered: Tensor = torch.stack( + [channel_select_filtered_positive, channel_select_filtered_negative], 1 + ) + + is_max: Tensor = channel_select_filtered.min(dim=1)[0] > 0.0 + + magnitude = magnitude * is_max + + # Threshold + edges: Tensor = F.threshold(magnitude, low_threshold, 0.0) + + low: Tensor = magnitude > low_threshold + high: Tensor = magnitude > high_threshold + + edges = low * 0.5 + high * 0.5 + edges = edges.to(dtype) + + # Hysteresis + if hysteresis: + edges_old: Tensor = -torch.ones(edges.shape, device=edges.device, dtype=dtype) + hysteresis_kernels: Tensor = get_hysteresis_kernel(device, dtype) + + while ((edges_old - edges).abs() != 0).any(): + weak: Tensor = (edges == 0.5).float() + strong: Tensor = (edges == 1).float() + + hysteresis_magnitude: Tensor = F.conv2d( + edges, hysteresis_kernels, 
padding=hysteresis_kernels.shape[-1] // 2 + ) + hysteresis_magnitude = (hysteresis_magnitude == 1).any(1, keepdim=True).to(dtype) + hysteresis_magnitude = hysteresis_magnitude * weak + strong + + edges_old = edges.clone() + edges = hysteresis_magnitude + (hysteresis_magnitude == 0) * weak * 0.5 + + edges = hysteresis_magnitude + + return magnitude, edges + + +class Canny: + @classmethod + def INPUT_TYPES(s): + return {"required": {"image": ("IMAGE",), + "low_threshold": ("FLOAT", {"default": 0.4, "min": 0.01, "max": 0.99, "step": 0.01}), + "high_threshold": ("FLOAT", {"default": 0.8, "min": 0.01, "max": 0.99, "step": 0.01}) + }} + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "detect_edge" + + CATEGORY = "image/preprocessors" + + def detect_edge(self, image, low_threshold, high_threshold): + output = canny(image.to(ldm_patched.modules.model_management.get_torch_device()).movedim(-1, 1), low_threshold, high_threshold) + img_out = output[1].to(ldm_patched.modules.model_management.intermediate_device()).repeat(1, 3, 1, 1).movedim(1, -1) + return (img_out,) + +NODE_CLASS_MAPPINGS = { + "Canny": Canny, +} diff --git a/ldm_patched/contrib/external_clip_sdxl.py b/ldm_patched/contrib/external_clip_sdxl.py new file mode 100644 index 0000000000000000000000000000000000000000..230321a87a1344409459ed9803fda09083cb98f1 --- /dev/null +++ b/ldm_patched/contrib/external_clip_sdxl.py @@ -0,0 +1,58 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import torch +from ldm_patched.contrib.external import MAX_RESOLUTION + +class CLIPTextEncodeSDXLRefiner: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "ascore": ("FLOAT", {"default": 6.0, "min": 0.0, "max": 1000.0, "step": 0.01}), + "width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "text": ("STRING", {"multiline": True}), "clip": ("CLIP", ), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "encode" + + CATEGORY = "advanced/conditioning" + + def encode(self, clip, ascore, width, height, text): + tokens = clip.tokenize(text) + cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True) + return ([[cond, {"pooled_output": pooled, "aesthetic_score": ascore, "width": width,"height": height}]], ) + +class CLIPTextEncodeSDXL: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "crop_w": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION}), + "crop_h": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION}), + "target_width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "target_height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "text_g": ("STRING", {"multiline": True, "default": "CLIP_G"}), "clip": ("CLIP", ), + "text_l": ("STRING", {"multiline": True, "default": "CLIP_L"}), "clip": ("CLIP", ), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "encode" + + CATEGORY = "advanced/conditioning" + + def encode(self, clip, width, height, crop_w, crop_h, target_width, target_height, text_g, text_l): + tokens = clip.tokenize(text_g) + tokens["l"] = clip.tokenize(text_l)["l"] + if len(tokens["l"]) != len(tokens["g"]): + empty = clip.tokenize("") + while len(tokens["l"]) < len(tokens["g"]): + tokens["l"] += empty["l"] + while len(tokens["l"]) > len(tokens["g"]): + tokens["g"] += empty["g"] + cond, pooled = 
clip.encode_from_tokens(tokens, return_pooled=True) + return ([[cond, {"pooled_output": pooled, "width": width, "height": height, "crop_w": crop_w, "crop_h": crop_h, "target_width": target_width, "target_height": target_height}]], ) + +NODE_CLASS_MAPPINGS = { + "CLIPTextEncodeSDXLRefiner": CLIPTextEncodeSDXLRefiner, + "CLIPTextEncodeSDXL": CLIPTextEncodeSDXL, +} diff --git a/ldm_patched/contrib/external_compositing.py b/ldm_patched/contrib/external_compositing.py new file mode 100644 index 0000000000000000000000000000000000000000..0cf91d9a75bb72d89925d0eb14014fbcb41e8adf --- /dev/null +++ b/ldm_patched/contrib/external_compositing.py @@ -0,0 +1,204 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import numpy as np +import torch +import ldm_patched.modules.utils +from enum import Enum + +def resize_mask(mask, shape): + return torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(shape[0], shape[1]), mode="bilinear").squeeze(1) + +class PorterDuffMode(Enum): + ADD = 0 + CLEAR = 1 + DARKEN = 2 + DST = 3 + DST_ATOP = 4 + DST_IN = 5 + DST_OUT = 6 + DST_OVER = 7 + LIGHTEN = 8 + MULTIPLY = 9 + OVERLAY = 10 + SCREEN = 11 + SRC = 12 + SRC_ATOP = 13 + SRC_IN = 14 + SRC_OUT = 15 + SRC_OVER = 16 + XOR = 17 + + +def porter_duff_composite(src_image: torch.Tensor, src_alpha: torch.Tensor, dst_image: torch.Tensor, dst_alpha: torch.Tensor, mode: PorterDuffMode): + if mode == PorterDuffMode.ADD: + out_alpha = torch.clamp(src_alpha + dst_alpha, 0, 1) + out_image = torch.clamp(src_image + dst_image, 0, 1) + elif mode == PorterDuffMode.CLEAR: + out_alpha = torch.zeros_like(dst_alpha) + out_image = torch.zeros_like(dst_image) + elif mode == PorterDuffMode.DARKEN: + out_alpha = src_alpha + dst_alpha - src_alpha * dst_alpha + out_image = (1 - dst_alpha) * src_image + (1 - src_alpha) * dst_image + torch.min(src_image, dst_image) + elif mode == PorterDuffMode.DST: + out_alpha = dst_alpha + out_image = dst_image + elif mode == PorterDuffMode.DST_ATOP: + out_alpha = src_alpha + out_image = src_alpha * dst_image + (1 - dst_alpha) * src_image + elif mode == PorterDuffMode.DST_IN: + out_alpha = src_alpha * dst_alpha + out_image = dst_image * src_alpha + elif mode == PorterDuffMode.DST_OUT: + out_alpha = (1 - src_alpha) * dst_alpha + out_image = (1 - src_alpha) * dst_image + elif mode == PorterDuffMode.DST_OVER: + out_alpha = dst_alpha + (1 - dst_alpha) * src_alpha + out_image = dst_image + (1 - dst_alpha) * src_image + elif mode == PorterDuffMode.LIGHTEN: + out_alpha = src_alpha + dst_alpha - src_alpha * dst_alpha + out_image = (1 - dst_alpha) * src_image + (1 - src_alpha) * dst_image + torch.max(src_image, dst_image) + elif mode == PorterDuffMode.MULTIPLY: + out_alpha = src_alpha * dst_alpha + out_image = src_image * dst_image + elif mode == PorterDuffMode.OVERLAY: + out_alpha = src_alpha + dst_alpha - src_alpha * dst_alpha + out_image = torch.where(2 * dst_image < dst_alpha, 2 * src_image * dst_image, + src_alpha * dst_alpha - 2 * (dst_alpha - src_image) * (src_alpha - dst_image)) + elif mode == PorterDuffMode.SCREEN: + out_alpha = src_alpha + dst_alpha - src_alpha * dst_alpha + out_image = src_image + dst_image - src_image * dst_image + elif mode == PorterDuffMode.SRC: + out_alpha = src_alpha + out_image = src_image + elif mode == PorterDuffMode.SRC_ATOP: + out_alpha = dst_alpha + out_image = dst_alpha * src_image + (1 - src_alpha) * dst_image + elif mode == PorterDuffMode.SRC_IN: + out_alpha = src_alpha * dst_alpha + out_image = src_image * dst_alpha + 
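    # The remaining modes (SRC_OUT, SRC_OVER, XOR) are pure Porter-Duff compositions:
    # out = F_src * src + F_dst * dst, where each factor is 0, 1, or (1 - alpha)
    # of the opposite layer.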
elif mode == PorterDuffMode.SRC_OUT: + out_alpha = (1 - dst_alpha) * src_alpha + out_image = (1 - dst_alpha) * src_image + elif mode == PorterDuffMode.SRC_OVER: + out_alpha = src_alpha + (1 - src_alpha) * dst_alpha + out_image = src_image + (1 - src_alpha) * dst_image + elif mode == PorterDuffMode.XOR: + out_alpha = (1 - dst_alpha) * src_alpha + (1 - src_alpha) * dst_alpha + out_image = (1 - dst_alpha) * src_image + (1 - src_alpha) * dst_image + else: + out_alpha = None + out_image = None + return out_image, out_alpha + + +class PorterDuffImageComposite: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "source": ("IMAGE",), + "source_alpha": ("MASK",), + "destination": ("IMAGE",), + "destination_alpha": ("MASK",), + "mode": ([mode.name for mode in PorterDuffMode], {"default": PorterDuffMode.DST.name}), + }, + } + + RETURN_TYPES = ("IMAGE", "MASK") + FUNCTION = "composite" + CATEGORY = "mask/compositing" + + def composite(self, source: torch.Tensor, source_alpha: torch.Tensor, destination: torch.Tensor, destination_alpha: torch.Tensor, mode): + batch_size = min(len(source), len(source_alpha), len(destination), len(destination_alpha)) + out_images = [] + out_alphas = [] + + for i in range(batch_size): + src_image = source[i] + dst_image = destination[i] + + assert src_image.shape[2] == dst_image.shape[2] # inputs need to have same number of channels + + src_alpha = source_alpha[i].unsqueeze(2) + dst_alpha = destination_alpha[i].unsqueeze(2) + + if dst_alpha.shape[:2] != dst_image.shape[:2]: + upscale_input = dst_alpha.unsqueeze(0).permute(0, 3, 1, 2) + upscale_output = ldm_patched.modules.utils.common_upscale(upscale_input, dst_image.shape[1], dst_image.shape[0], upscale_method='bicubic', crop='center') + dst_alpha = upscale_output.permute(0, 2, 3, 1).squeeze(0) + if src_image.shape != dst_image.shape: + upscale_input = src_image.unsqueeze(0).permute(0, 3, 1, 2) + upscale_output = ldm_patched.modules.utils.common_upscale(upscale_input, dst_image.shape[1], dst_image.shape[0], upscale_method='bicubic', crop='center') + src_image = upscale_output.permute(0, 2, 3, 1).squeeze(0) + if src_alpha.shape != dst_alpha.shape: + upscale_input = src_alpha.unsqueeze(0).permute(0, 3, 1, 2) + upscale_output = ldm_patched.modules.utils.common_upscale(upscale_input, dst_alpha.shape[1], dst_alpha.shape[0], upscale_method='bicubic', crop='center') + src_alpha = upscale_output.permute(0, 2, 3, 1).squeeze(0) + + out_image, out_alpha = porter_duff_composite(src_image, src_alpha, dst_image, dst_alpha, PorterDuffMode[mode]) + + out_images.append(out_image) + out_alphas.append(out_alpha.squeeze(2)) + + result = (torch.stack(out_images), torch.stack(out_alphas)) + return result + + +class SplitImageWithAlpha: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + } + } + + CATEGORY = "mask/compositing" + RETURN_TYPES = ("IMAGE", "MASK") + FUNCTION = "split_image_with_alpha" + + def split_image_with_alpha(self, image: torch.Tensor): + out_images = [i[:,:,:3] for i in image] + out_alphas = [i[:,:,3] if i.shape[2] > 3 else torch.ones_like(i[:,:,0]) for i in image] + result = (torch.stack(out_images), 1.0 - torch.stack(out_alphas)) + return result + + +class JoinImageWithAlpha: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "alpha": ("MASK",), + } + } + + CATEGORY = "mask/compositing" + RETURN_TYPES = ("IMAGE",) + FUNCTION = "join_image_with_alpha" + + def join_image_with_alpha(self, image: torch.Tensor, alpha: torch.Tensor): + 
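        # MASK tensors store 1.0 where content is hidden, so the mask is inverted
        # below before being written into the image's alpha channel.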
batch_size = min(len(image), len(alpha)) + out_images = [] + + alpha = 1.0 - resize_mask(alpha, image.shape[1:]) + for i in range(batch_size): + out_images.append(torch.cat((image[i][:,:,:3], alpha[i].unsqueeze(2)), dim=2)) + + result = (torch.stack(out_images),) + return result + + +NODE_CLASS_MAPPINGS = { + "PorterDuffImageComposite": PorterDuffImageComposite, + "SplitImageWithAlpha": SplitImageWithAlpha, + "JoinImageWithAlpha": JoinImageWithAlpha, +} + + +NODE_DISPLAY_NAME_MAPPINGS = { + "PorterDuffImageComposite": "Porter-Duff Image Composite", + "SplitImageWithAlpha": "Split Image with Alpha", + "JoinImageWithAlpha": "Join Image with Alpha", +} diff --git a/ldm_patched/contrib/external_custom_sampler.py b/ldm_patched/contrib/external_custom_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..8f92e841f4d1a3741c970f05392be5758be79d33 --- /dev/null +++ b/ldm_patched/contrib/external_custom_sampler.py @@ -0,0 +1,297 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import ldm_patched.modules.samplers +import ldm_patched.modules.sample +from ldm_patched.k_diffusion import sampling as k_diffusion_sampling +import ldm_patched.utils.latent_visualization +import torch +import ldm_patched.modules.utils + + +class BasicScheduler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "scheduler": (ldm_patched.modules.samplers.SCHEDULER_NAMES, ), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, model, scheduler, steps, denoise): + total_steps = steps + if denoise < 1.0: + total_steps = int(steps/denoise) + + ldm_patched.modules.model_management.load_models_gpu([model]) + sigmas = ldm_patched.modules.samplers.calculate_sigmas_scheduler(model.model, scheduler, total_steps).cpu() + sigmas = sigmas[-(steps + 1):] + return (sigmas, ) + + +class KarrasScheduler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "sigma_max": ("FLOAT", {"default": 14.614642, "min": 0.0, "max": 1000.0, "step":0.01, "round": False}), + "sigma_min": ("FLOAT", {"default": 0.0291675, "min": 0.0, "max": 1000.0, "step":0.01, "round": False}), + "rho": ("FLOAT", {"default": 7.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, steps, sigma_max, sigma_min, rho): + sigmas = k_diffusion_sampling.get_sigmas_karras(n=steps, sigma_min=sigma_min, sigma_max=sigma_max, rho=rho) + return (sigmas, ) + +class ExponentialScheduler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "sigma_max": ("FLOAT", {"default": 14.614642, "min": 0.0, "max": 1000.0, "step":0.01, "round": False}), + "sigma_min": ("FLOAT", {"default": 0.0291675, "min": 0.0, "max": 1000.0, "step":0.01, "round": False}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, steps, sigma_max, sigma_min): + sigmas = k_diffusion_sampling.get_sigmas_exponential(n=steps, sigma_min=sigma_min, sigma_max=sigma_max) + return (sigmas, ) + +class PolyexponentialScheduler: + @classmethod + def 
INPUT_TYPES(s): + return {"required": + {"steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "sigma_max": ("FLOAT", {"default": 14.614642, "min": 0.0, "max": 1000.0, "step":0.01, "round": False}), + "sigma_min": ("FLOAT", {"default": 0.0291675, "min": 0.0, "max": 1000.0, "step":0.01, "round": False}), + "rho": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, steps, sigma_max, sigma_min, rho): + sigmas = k_diffusion_sampling.get_sigmas_polyexponential(n=steps, sigma_min=sigma_min, sigma_max=sigma_max, rho=rho) + return (sigmas, ) + +class SDTurboScheduler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "steps": ("INT", {"default": 1, "min": 1, "max": 10}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0, "max": 1.0, "step": 0.01}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, model, steps, denoise): + start_step = 10 - int(10 * denoise) + timesteps = torch.flip(torch.arange(1, 11) * 100 - 1, (0,))[start_step:start_step + steps] + ldm_patched.modules.model_management.load_models_gpu([model]) + sigmas = model.model.model_sampling.sigma(timesteps) + sigmas = torch.cat([sigmas, sigmas.new_zeros([1])]) + return (sigmas, ) + +class VPScheduler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "beta_d": ("FLOAT", {"default": 19.9, "min": 0.0, "max": 1000.0, "step":0.01, "round": False}), #TODO: fix default values + "beta_min": ("FLOAT", {"default": 0.1, "min": 0.0, "max": 1000.0, "step":0.01, "round": False}), + "eps_s": ("FLOAT", {"default": 0.001, "min": 0.0, "max": 1.0, "step":0.0001, "round": False}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, steps, beta_d, beta_min, eps_s): + sigmas = k_diffusion_sampling.get_sigmas_vp(n=steps, beta_d=beta_d, beta_min=beta_min, eps_s=eps_s) + return (sigmas, ) + +class SplitSigmas: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"sigmas": ("SIGMAS", ), + "step": ("INT", {"default": 0, "min": 0, "max": 10000}), + } + } + RETURN_TYPES = ("SIGMAS","SIGMAS") + CATEGORY = "sampling/custom_sampling/sigmas" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, sigmas, step): + sigmas1 = sigmas[:step + 1] + sigmas2 = sigmas[step:] + return (sigmas1, sigmas2) + +class FlipSigmas: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"sigmas": ("SIGMAS", ), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/sigmas" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, sigmas): + sigmas = sigmas.flip(0) + if sigmas[0] == 0: + sigmas[0] = 0.0001 + return (sigmas,) + +class KSamplerSelect: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"sampler_name": (ldm_patched.modules.samplers.SAMPLER_NAMES, ), + } + } + RETURN_TYPES = ("SAMPLER",) + CATEGORY = "sampling/custom_sampling/samplers" + + FUNCTION = "get_sampler" + + def get_sampler(self, sampler_name): + sampler = ldm_patched.modules.samplers.sampler_object(sampler_name) + return (sampler, ) + +class SamplerDPMPP_2M_SDE: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"solver_type": (['midpoint', 'heun'], ), + "eta": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 
100.0, "step":0.01, "round": False}), + "s_noise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "noise_device": (['gpu', 'cpu'], ), + } + } + RETURN_TYPES = ("SAMPLER",) + CATEGORY = "sampling/custom_sampling/samplers" + + FUNCTION = "get_sampler" + + def get_sampler(self, solver_type, eta, s_noise, noise_device): + if noise_device == 'cpu': + sampler_name = "dpmpp_2m_sde" + else: + sampler_name = "dpmpp_2m_sde_gpu" + sampler = ldm_patched.modules.samplers.ksampler(sampler_name, {"eta": eta, "s_noise": s_noise, "solver_type": solver_type}) + return (sampler, ) + + +class SamplerDPMPP_SDE: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"eta": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "s_noise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "r": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "noise_device": (['gpu', 'cpu'], ), + } + } + RETURN_TYPES = ("SAMPLER",) + CATEGORY = "sampling/custom_sampling/samplers" + + FUNCTION = "get_sampler" + + def get_sampler(self, eta, s_noise, r, noise_device): + if noise_device == 'cpu': + sampler_name = "dpmpp_sde" + else: + sampler_name = "dpmpp_sde_gpu" + sampler = ldm_patched.modules.samplers.ksampler(sampler_name, {"eta": eta, "s_noise": s_noise, "r": r}) + return (sampler, ) + +class SamplerCustom: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "add_noise": ("BOOLEAN", {"default": True}), + "noise_seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "sampler": ("SAMPLER", ), + "sigmas": ("SIGMAS", ), + "latent_image": ("LATENT", ), + } + } + + RETURN_TYPES = ("LATENT","LATENT") + RETURN_NAMES = ("output", "denoised_output") + + FUNCTION = "sample" + + CATEGORY = "sampling/custom_sampling" + + def sample(self, model, add_noise, noise_seed, cfg, positive, negative, sampler, sigmas, latent_image): + latent = latent_image + latent_image = latent["samples"] + if not add_noise: + noise = torch.zeros(latent_image.size(), dtype=latent_image.dtype, layout=latent_image.layout, device="cpu") + else: + batch_inds = latent["batch_index"] if "batch_index" in latent else None + noise = ldm_patched.modules.sample.prepare_noise(latent_image, noise_seed, batch_inds) + + noise_mask = None + if "noise_mask" in latent: + noise_mask = latent["noise_mask"] + + x0_output = {} + callback = ldm_patched.utils.latent_visualization.prepare_callback(model, sigmas.shape[-1] - 1, x0_output) + + disable_pbar = not ldm_patched.modules.utils.PROGRESS_BAR_ENABLED + samples = ldm_patched.modules.sample.sample_custom(model, noise, cfg, sampler, sigmas, positive, negative, latent_image, noise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=noise_seed) + + out = latent.copy() + out["samples"] = samples + if "x0" in x0_output: + out_denoised = latent.copy() + out_denoised["samples"] = model.model.process_latent_out(x0_output["x0"].cpu()) + else: + out_denoised = out + return (out, out_denoised) + +NODE_CLASS_MAPPINGS = { + "SamplerCustom": SamplerCustom, + "BasicScheduler": BasicScheduler, + "KarrasScheduler": KarrasScheduler, + "ExponentialScheduler": ExponentialScheduler, + "PolyexponentialScheduler": PolyexponentialScheduler, + "VPScheduler": VPScheduler, + "SDTurboScheduler": 
SDTurboScheduler, + "KSamplerSelect": KSamplerSelect, + "SamplerDPMPP_2M_SDE": SamplerDPMPP_2M_SDE, + "SamplerDPMPP_SDE": SamplerDPMPP_SDE, + "SplitSigmas": SplitSigmas, + "FlipSigmas": FlipSigmas, +} diff --git a/ldm_patched/contrib/external_freelunch.py b/ldm_patched/contrib/external_freelunch.py new file mode 100644 index 0000000000000000000000000000000000000000..59ec5babd632d7804b9c34442729910f0cb338d2 --- /dev/null +++ b/ldm_patched/contrib/external_freelunch.py @@ -0,0 +1,115 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +#code originally taken from: https://github.com/ChenyangSi/FreeU (under MIT License) + +import torch + + +def Fourier_filter(x, threshold, scale): + # FFT + x_freq = torch.fft.fftn(x.float(), dim=(-2, -1)) + x_freq = torch.fft.fftshift(x_freq, dim=(-2, -1)) + + B, C, H, W = x_freq.shape + mask = torch.ones((B, C, H, W), device=x.device) + + crow, ccol = H // 2, W //2 + mask[..., crow - threshold:crow + threshold, ccol - threshold:ccol + threshold] = scale + x_freq = x_freq * mask + + # IFFT + x_freq = torch.fft.ifftshift(x_freq, dim=(-2, -1)) + x_filtered = torch.fft.ifftn(x_freq, dim=(-2, -1)).real + + return x_filtered.to(x.dtype) + + +class FreeU: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "b1": ("FLOAT", {"default": 1.1, "min": 0.0, "max": 10.0, "step": 0.01}), + "b2": ("FLOAT", {"default": 1.2, "min": 0.0, "max": 10.0, "step": 0.01}), + "s1": ("FLOAT", {"default": 0.9, "min": 0.0, "max": 10.0, "step": 0.01}), + "s2": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 10.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "model_patches" + + def patch(self, model, b1, b2, s1, s2): + model_channels = model.model.model_config.unet_config["model_channels"] + scale_dict = {model_channels * 4: (b1, s1), model_channels * 2: (b2, s2)} + on_cpu_devices = {} + + def output_block_patch(h, hsp, transformer_options): + scale = scale_dict.get(h.shape[1], None) + if scale is not None: + h[:,:h.shape[1] // 2] = h[:,:h.shape[1] // 2] * scale[0] + if hsp.device not in on_cpu_devices: + try: + hsp = Fourier_filter(hsp, threshold=1, scale=scale[1]) + except: + print("Device", hsp.device, "does not support the torch.fft functions used in the FreeU node, switching to CPU.") + on_cpu_devices[hsp.device] = True + hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device) + else: + hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device) + + return h, hsp + + m = model.clone() + m.set_model_output_block_patch(output_block_patch) + return (m, ) + +class FreeU_V2: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "b1": ("FLOAT", {"default": 1.3, "min": 0.0, "max": 10.0, "step": 0.01}), + "b2": ("FLOAT", {"default": 1.4, "min": 0.0, "max": 10.0, "step": 0.01}), + "s1": ("FLOAT", {"default": 0.9, "min": 0.0, "max": 10.0, "step": 0.01}), + "s2": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 10.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "model_patches" + + def patch(self, model, b1, b2, s1, s2): + model_channels = model.model.model_config.unet_config["model_channels"] + scale_dict = {model_channels * 4: (b1, s1), model_channels * 2: (b2, s2)} + on_cpu_devices = {} + + def output_block_patch(h, hsp, transformer_options): + scale = scale_dict.get(h.shape[1], None) + if scale is not None: + hidden_mean = h.mean(1).unsqueeze(1) + B = hidden_mean.shape[0] + hidden_max, _ = 
torch.max(hidden_mean.view(B, -1), dim=-1, keepdim=True) + hidden_min, _ = torch.min(hidden_mean.view(B, -1), dim=-1, keepdim=True) + hidden_mean = (hidden_mean - hidden_min.unsqueeze(2).unsqueeze(3)) / (hidden_max - hidden_min).unsqueeze(2).unsqueeze(3) + + h[:,:h.shape[1] // 2] = h[:,:h.shape[1] // 2] * ((scale[0] - 1 ) * hidden_mean + 1) + + if hsp.device not in on_cpu_devices: + try: + hsp = Fourier_filter(hsp, threshold=1, scale=scale[1]) + except: + print("Device", hsp.device, "does not support the torch.fft functions used in the FreeU node, switching to CPU.") + on_cpu_devices[hsp.device] = True + hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device) + else: + hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device) + + return h, hsp + + m = model.clone() + m.set_model_output_block_patch(output_block_patch) + return (m, ) + +NODE_CLASS_MAPPINGS = { + "FreeU": FreeU, + "FreeU_V2": FreeU_V2, +} diff --git a/ldm_patched/contrib/external_hypernetwork.py b/ldm_patched/contrib/external_hypernetwork.py new file mode 100644 index 0000000000000000000000000000000000000000..17aaacb009d5d2e8de8af547645ef6a082b9da46 --- /dev/null +++ b/ldm_patched/contrib/external_hypernetwork.py @@ -0,0 +1,121 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import ldm_patched.modules.utils +import ldm_patched.utils.path_utils +import torch + +def load_hypernetwork_patch(path, strength): + sd = ldm_patched.modules.utils.load_torch_file(path, safe_load=True) + activation_func = sd.get('activation_func', 'linear') + is_layer_norm = sd.get('is_layer_norm', False) + use_dropout = sd.get('use_dropout', False) + activate_output = sd.get('activate_output', False) + last_layer_dropout = sd.get('last_layer_dropout', False) + + valid_activation = { + "linear": torch.nn.Identity, + "relu": torch.nn.ReLU, + "leakyrelu": torch.nn.LeakyReLU, + "elu": torch.nn.ELU, + "swish": torch.nn.Hardswish, + "tanh": torch.nn.Tanh, + "sigmoid": torch.nn.Sigmoid, + "softsign": torch.nn.Softsign, + "mish": torch.nn.Mish, + } + + if activation_func not in valid_activation: + print("Unsupported Hypernetwork format, if you report it I might implement it.", path, " ", activation_func, is_layer_norm, use_dropout, activate_output, last_layer_dropout) + return None + + out = {} + + for d in sd: + try: + dim = int(d) + except: + continue + + output = [] + for index in [0, 1]: + attn_weights = sd[dim][index] + keys = attn_weights.keys() + + linears = filter(lambda a: a.endswith(".weight"), keys) + linears = list(map(lambda a: a[:-len(".weight")], linears)) + layers = [] + + i = 0 + while i < len(linears): + lin_name = linears[i] + last_layer = (i == (len(linears) - 1)) + penultimate_layer = (i == (len(linears) - 2)) + + lin_weight = attn_weights['{}.weight'.format(lin_name)] + lin_bias = attn_weights['{}.bias'.format(lin_name)] + layer = torch.nn.Linear(lin_weight.shape[1], lin_weight.shape[0]) + layer.load_state_dict({"weight": lin_weight, "bias": lin_bias}) + layers.append(layer) + if activation_func != "linear": + if (not last_layer) or (activate_output): + layers.append(valid_activation[activation_func]()) + if is_layer_norm: + i += 1 + ln_name = linears[i] + ln_weight = attn_weights['{}.weight'.format(ln_name)] + ln_bias = attn_weights['{}.bias'.format(ln_name)] + ln = torch.nn.LayerNorm(ln_weight.shape[0]) + ln.load_state_dict({"weight": ln_weight, "bias": ln_bias}) + layers.append(ln) + if use_dropout: + if (not last_layer) and (not penultimate_layer or last_layer_dropout): + 
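                        # Dropout never follows the final linear layer; on the penultimate
                        # layer it is added only when last_layer_dropout is set. The
                        # probability 0.3 is hard-coded by this format.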
layers.append(torch.nn.Dropout(p=0.3)) + i += 1 + + output.append(torch.nn.Sequential(*layers)) + out[dim] = torch.nn.ModuleList(output) + + class hypernetwork_patch: + def __init__(self, hypernet, strength): + self.hypernet = hypernet + self.strength = strength + def __call__(self, q, k, v, extra_options): + dim = k.shape[-1] + if dim in self.hypernet: + hn = self.hypernet[dim] + k = k + hn[0](k) * self.strength + v = v + hn[1](v) * self.strength + + return q, k, v + + def to(self, device): + for d in self.hypernet.keys(): + self.hypernet[d] = self.hypernet[d].to(device) + return self + + return hypernetwork_patch(out, strength) + +class HypernetworkLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "hypernetwork_name": (ldm_patched.utils.path_utils.get_filename_list("hypernetworks"), ), + "strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "load_hypernetwork" + + CATEGORY = "loaders" + + def load_hypernetwork(self, model, hypernetwork_name, strength): + hypernetwork_path = ldm_patched.utils.path_utils.get_full_path("hypernetworks", hypernetwork_name) + model_hypernetwork = model.clone() + patch = load_hypernetwork_patch(hypernetwork_path, strength) + if patch is not None: + model_hypernetwork.set_model_attn1_patch(patch) + model_hypernetwork.set_model_attn2_patch(patch) + return (model_hypernetwork,) + +NODE_CLASS_MAPPINGS = { + "HypernetworkLoader": HypernetworkLoader +} diff --git a/ldm_patched/contrib/external_hypertile.py b/ldm_patched/contrib/external_hypertile.py new file mode 100644 index 0000000000000000000000000000000000000000..5cf7d9d6d1609b6a02a89a8a74aca9fdccaeda58 --- /dev/null +++ b/ldm_patched/contrib/external_hypertile.py @@ -0,0 +1,85 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +#Taken from: https://github.com/tfernd/HyperTile/ + +import math +from einops import rearrange +# Use torch rng for consistency across generations +from torch import randint + +def random_divisor(value: int, min_value: int, /, max_options: int = 1) -> int: + min_value = min(min_value, value) + + # All big divisors of value (inclusive) + divisors = [i for i in range(min_value, value + 1) if value % i == 0] + + ns = [value // i for i in divisors[:max_options]] # has at least 1 element + + if len(ns) - 1 > 0: + idx = randint(low=0, high=len(ns) - 1, size=(1,)).item() + else: + idx = 0 + + return ns[idx] + +class HyperTile: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "tile_size": ("INT", {"default": 256, "min": 1, "max": 2048}), + "swap_size": ("INT", {"default": 2, "min": 1, "max": 128}), + "max_depth": ("INT", {"default": 0, "min": 0, "max": 10}), + "scale_depth": ("BOOLEAN", {"default": False}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "model_patches" + + def patch(self, model, tile_size, swap_size, max_depth, scale_depth): + model_channels = model.model.model_config.unet_config["model_channels"] + + latent_tile_size = max(32, tile_size) // 8 + self.temp = None + + def hypertile_in(q, k, v, extra_options): + model_chans = q.shape[-2] + orig_shape = extra_options['original_shape'] + apply_to = [] + for i in range(max_depth + 1): + apply_to.append((orig_shape[-2] / (2 ** i)) * (orig_shape[-1] / (2 ** i))) + + if model_chans in apply_to: + shape = extra_options["original_shape"] + aspect_ratio = shape[-1] / shape[-2] + + hw = q.size(1) + h, w = round(math.sqrt(hw * aspect_ratio)), 
round(math.sqrt(hw / aspect_ratio)) + + factor = (2 ** apply_to.index(model_chans)) if scale_depth else 1 + nh = random_divisor(h, latent_tile_size * factor, swap_size) + nw = random_divisor(w, latent_tile_size * factor, swap_size) + + if nh * nw > 1: + q = rearrange(q, "b (nh h nw w) c -> (b nh nw) (h w) c", h=h // nh, w=w // nw, nh=nh, nw=nw) + self.temp = (nh, nw, h, w) + return q, k, v + + return q, k, v + def hypertile_out(out, extra_options): + if self.temp is not None: + nh, nw, h, w = self.temp + self.temp = None + out = rearrange(out, "(b nh nw) hw c -> b nh nw hw c", nh=nh, nw=nw) + out = rearrange(out, "b nh nw (h w) c -> b (nh h nw w) c", h=h // nh, w=w // nw) + return out + + + m = model.clone() + m.set_model_attn1_patch(hypertile_in) + m.set_model_attn1_output_patch(hypertile_out) + return (m, ) + +NODE_CLASS_MAPPINGS = { + "HyperTile": HyperTile, +} diff --git a/ldm_patched/contrib/external_images.py b/ldm_patched/contrib/external_images.py new file mode 100644 index 0000000000000000000000000000000000000000..17e9c4978101644f66e7db18fbb4e5d00d29bcd2 --- /dev/null +++ b/ldm_patched/contrib/external_images.py @@ -0,0 +1,177 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import ldm_patched.contrib.external +import ldm_patched.utils.path_utils +from ldm_patched.modules.args_parser import args + +from PIL import Image +from PIL.PngImagePlugin import PngInfo + +import numpy as np +import json +import os + +MAX_RESOLUTION = ldm_patched.contrib.external.MAX_RESOLUTION + +class ImageCrop: + @classmethod + def INPUT_TYPES(s): + return {"required": { "image": ("IMAGE",), + "width": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), + "height": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), + "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + }} + RETURN_TYPES = ("IMAGE",) + FUNCTION = "crop" + + CATEGORY = "image/transform" + + def crop(self, image, width, height, x, y): + x = min(x, image.shape[2] - 1) + y = min(y, image.shape[1] - 1) + to_x = width + x + to_y = height + y + img = image[:,y:to_y, x:to_x, :] + return (img,) + +class RepeatImageBatch: + @classmethod + def INPUT_TYPES(s): + return {"required": { "image": ("IMAGE",), + "amount": ("INT", {"default": 1, "min": 1, "max": 64}), + }} + RETURN_TYPES = ("IMAGE",) + FUNCTION = "repeat" + + CATEGORY = "image/batch" + + def repeat(self, image, amount): + s = image.repeat((amount, 1,1,1)) + return (s,) + +class SaveAnimatedWEBP: + def __init__(self): + self.output_dir = ldm_patched.utils.path_utils.get_output_directory() + self.type = "output" + self.prefix_append = "" + + methods = {"default": 4, "fastest": 0, "slowest": 6} + @classmethod + def INPUT_TYPES(s): + return {"required": + {"images": ("IMAGE", ), + "filename_prefix": ("STRING", {"default": "ldm_patched"}), + "fps": ("FLOAT", {"default": 6.0, "min": 0.01, "max": 1000.0, "step": 0.01}), + "lossless": ("BOOLEAN", {"default": True}), + "quality": ("INT", {"default": 80, "min": 0, "max": 100}), + "method": (list(s.methods.keys()),), + # "num_frames": ("INT", {"default": 0, "min": 0, "max": 8192}), + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + RETURN_TYPES = () + FUNCTION = "save_images" + + OUTPUT_NODE = True + + CATEGORY = "image/animation" + + def save_images(self, images, fps, filename_prefix, lossless, quality, method, num_frames=0, prompt=None, extra_pnginfo=None): + 
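+ # map the UI choice ("default" / "fastest" / "slowest") to Pillow's integer WebP "method"
+ # setting (encoding effort: 0 = fastest encode, 6 = slowest / best compression), as declared
+ # in the class-level `methods` dict above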
method = self.methods.get(method) + filename_prefix += self.prefix_append + full_output_folder, filename, counter, subfolder, filename_prefix = ldm_patched.utils.path_utils.get_save_image_path(filename_prefix, self.output_dir, images[0].shape[1], images[0].shape[0]) + results = list() + pil_images = [] + for image in images: + i = 255. * image.cpu().numpy() + img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)) + pil_images.append(img) + + metadata = pil_images[0].getexif() + if not args.disable_server_info: + if prompt is not None: + metadata[0x0110] = "prompt:{}".format(json.dumps(prompt)) + if extra_pnginfo is not None: + inital_exif = 0x010f + for x in extra_pnginfo: + metadata[inital_exif] = "{}:{}".format(x, json.dumps(extra_pnginfo[x])) + inital_exif -= 1 + + if num_frames == 0: + num_frames = len(pil_images) + + c = len(pil_images) + for i in range(0, c, num_frames): + file = f"{filename}_{counter:05}_.webp" + pil_images[i].save(os.path.join(full_output_folder, file), save_all=True, duration=int(1000.0/fps), append_images=pil_images[i + 1:i + num_frames], exif=metadata, lossless=lossless, quality=quality, method=method) + results.append({ + "filename": file, + "subfolder": subfolder, + "type": self.type + }) + counter += 1 + + animated = num_frames != 1 + return { "ui": { "images": results, "animated": (animated,) } } + +class SaveAnimatedPNG: + def __init__(self): + self.output_dir = ldm_patched.utils.path_utils.get_output_directory() + self.type = "output" + self.prefix_append = "" + + @classmethod + def INPUT_TYPES(s): + return {"required": + {"images": ("IMAGE", ), + "filename_prefix": ("STRING", {"default": "ldm_patched"}), + "fps": ("FLOAT", {"default": 6.0, "min": 0.01, "max": 1000.0, "step": 0.01}), + "compress_level": ("INT", {"default": 4, "min": 0, "max": 9}) + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + RETURN_TYPES = () + FUNCTION = "save_images" + + OUTPUT_NODE = True + + CATEGORY = "image/animation" + + def save_images(self, images, fps, compress_level, filename_prefix="ldm_patched", prompt=None, extra_pnginfo=None): + filename_prefix += self.prefix_append + full_output_folder, filename, counter, subfolder, filename_prefix = ldm_patched.utils.path_utils.get_save_image_path(filename_prefix, self.output_dir, images[0].shape[1], images[0].shape[0]) + results = list() + pil_images = [] + for image in images: + i = 255. 
* image.cpu().numpy() + img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)) + pil_images.append(img) + + metadata = None + if not args.disable_server_info: + metadata = PngInfo() + if prompt is not None: + metadata.add(b"ldm_patched", "prompt".encode("latin-1", "strict") + b"\0" + json.dumps(prompt).encode("latin-1", "strict"), after_idat=True) + if extra_pnginfo is not None: + for x in extra_pnginfo: + metadata.add(b"ldm_patched", x.encode("latin-1", "strict") + b"\0" + json.dumps(extra_pnginfo[x]).encode("latin-1", "strict"), after_idat=True) + + file = f"{filename}_{counter:05}_.png" + pil_images[0].save(os.path.join(full_output_folder, file), pnginfo=metadata, compress_level=compress_level, save_all=True, duration=int(1000.0/fps), append_images=pil_images[1:]) + results.append({ + "filename": file, + "subfolder": subfolder, + "type": self.type + }) + + return { "ui": { "images": results, "animated": (True,)} } + +NODE_CLASS_MAPPINGS = { + "ImageCrop": ImageCrop, + "RepeatImageBatch": RepeatImageBatch, + "SaveAnimatedWEBP": SaveAnimatedWEBP, + "SaveAnimatedPNG": SaveAnimatedPNG, +} diff --git a/ldm_patched/contrib/external_latent.py b/ldm_patched/contrib/external_latent.py new file mode 100644 index 0000000000000000000000000000000000000000..6d753d0f7cfb019a1c84fe487e6a0890d903abbd --- /dev/null +++ b/ldm_patched/contrib/external_latent.py @@ -0,0 +1,157 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import ldm_patched.modules.utils +import torch + +def reshape_latent_to(target_shape, latent): + if latent.shape[1:] != target_shape[1:]: + latent = ldm_patched.modules.utils.common_upscale(latent, target_shape[3], target_shape[2], "bilinear", "center") + return ldm_patched.modules.utils.repeat_to_batch_size(latent, target_shape[0]) + + +class LatentAdd: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples1": ("LATENT",), "samples2": ("LATENT",)}} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "op" + + CATEGORY = "latent/advanced" + + def op(self, samples1, samples2): + samples_out = samples1.copy() + + s1 = samples1["samples"] + s2 = samples2["samples"] + + s2 = reshape_latent_to(s1.shape, s2) + samples_out["samples"] = s1 + s2 + return (samples_out,) + +class LatentSubtract: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples1": ("LATENT",), "samples2": ("LATENT",)}} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "op" + + CATEGORY = "latent/advanced" + + def op(self, samples1, samples2): + samples_out = samples1.copy() + + s1 = samples1["samples"] + s2 = samples2["samples"] + + s2 = reshape_latent_to(s1.shape, s2) + samples_out["samples"] = s1 - s2 + return (samples_out,) + +class LatentMultiply: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT",), + "multiplier": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + }} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "op" + + CATEGORY = "latent/advanced" + + def op(self, samples, multiplier): + samples_out = samples.copy() + + s1 = samples["samples"] + samples_out["samples"] = s1 * multiplier + return (samples_out,) + +class LatentInterpolate: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples1": ("LATENT",), + "samples2": ("LATENT",), + "ratio": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + }} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "op" + + CATEGORY = "latent/advanced" + + def op(self, samples1, samples2, ratio): + samples_out = samples1.copy() + + s1 = samples1["samples"] + s2 = 
samples2["samples"] + + s2 = reshape_latent_to(s1.shape, s2) + + m1 = torch.linalg.vector_norm(s1, dim=(1)) + m2 = torch.linalg.vector_norm(s2, dim=(1)) + + s1 = torch.nan_to_num(s1 / m1) + s2 = torch.nan_to_num(s2 / m2) + + t = (s1 * ratio + s2 * (1.0 - ratio)) + mt = torch.linalg.vector_norm(t, dim=(1)) + st = torch.nan_to_num(t / mt) + + samples_out["samples"] = st * (m1 * ratio + m2 * (1.0 - ratio)) + return (samples_out,) + +class LatentBatch: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples1": ("LATENT",), "samples2": ("LATENT",)}} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "batch" + + CATEGORY = "latent/batch" + + def batch(self, samples1, samples2): + samples_out = samples1.copy() + s1 = samples1["samples"] + s2 = samples2["samples"] + + if s1.shape[1:] != s2.shape[1:]: + s2 = ldm_patched.modules.utils.common_upscale(s2, s1.shape[3], s1.shape[2], "bilinear", "center") + s = torch.cat((s1, s2), dim=0) + samples_out["samples"] = s + samples_out["batch_index"] = samples1.get("batch_index", [x for x in range(0, s1.shape[0])]) + samples2.get("batch_index", [x for x in range(0, s2.shape[0])]) + return (samples_out,) + +class LatentBatchSeedBehavior: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT",), + "seed_behavior": (["random", "fixed"],),}} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "op" + + CATEGORY = "latent/advanced" + + def op(self, samples, seed_behavior): + samples_out = samples.copy() + latent = samples["samples"] + if seed_behavior == "random": + if 'batch_index' in samples_out: + samples_out.pop('batch_index') + elif seed_behavior == "fixed": + batch_number = samples_out.get("batch_index", [0])[0] + samples_out["batch_index"] = [batch_number] * latent.shape[0] + + return (samples_out,) + +NODE_CLASS_MAPPINGS = { + "LatentAdd": LatentAdd, + "LatentSubtract": LatentSubtract, + "LatentMultiply": LatentMultiply, + "LatentInterpolate": LatentInterpolate, + "LatentBatch": LatentBatch, + "LatentBatchSeedBehavior": LatentBatchSeedBehavior, +} diff --git a/ldm_patched/contrib/external_mask.py b/ldm_patched/contrib/external_mask.py new file mode 100644 index 0000000000000000000000000000000000000000..a86a7fe69b67f1df14672bd480060142d4385ed6 --- /dev/null +++ b/ldm_patched/contrib/external_mask.py @@ -0,0 +1,365 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import numpy as np +import scipy.ndimage +import torch +import ldm_patched.modules.utils + +from ldm_patched.contrib.external import MAX_RESOLUTION + +def composite(destination, source, x, y, mask = None, multiplier = 8, resize_source = False): + source = source.to(destination.device) + if resize_source: + source = torch.nn.functional.interpolate(source, size=(destination.shape[2], destination.shape[3]), mode="bilinear") + + source = ldm_patched.modules.utils.repeat_to_batch_size(source, destination.shape[0]) + + x = max(-source.shape[3] * multiplier, min(x, destination.shape[3] * multiplier)) + y = max(-source.shape[2] * multiplier, min(y, destination.shape[2] * multiplier)) + + left, top = (x // multiplier, y // multiplier) + right, bottom = (left + source.shape[3], top + source.shape[2],) + + if mask is None: + mask = torch.ones_like(source) + else: + mask = mask.to(destination.device, copy=True) + mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(source.shape[2], source.shape[3]), mode="bilinear") + mask = ldm_patched.modules.utils.repeat_to_batch_size(mask, source.shape[0]) + + # calculate the 
bounds of the source that will be overlapping the destination + # this prevents the source trying to overwrite latent pixels that are out of bounds + # of the destination + visible_width, visible_height = (destination.shape[3] - left + min(0, x), destination.shape[2] - top + min(0, y),) + + mask = mask[:, :, :visible_height, :visible_width] + inverse_mask = torch.ones_like(mask) - mask + + source_portion = mask * source[:, :, :visible_height, :visible_width] + destination_portion = inverse_mask * destination[:, :, top:bottom, left:right] + + destination[:, :, top:bottom, left:right] = source_portion + destination_portion + return destination + +class LatentCompositeMasked: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "destination": ("LATENT",), + "source": ("LATENT",), + "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "resize_source": ("BOOLEAN", {"default": False}), + }, + "optional": { + "mask": ("MASK",), + } + } + RETURN_TYPES = ("LATENT",) + FUNCTION = "composite" + + CATEGORY = "latent" + + def composite(self, destination, source, x, y, resize_source, mask = None): + output = destination.copy() + destination = destination["samples"].clone() + source = source["samples"] + output["samples"] = composite(destination, source, x, y, mask, 8, resize_source) + return (output,) + +class ImageCompositeMasked: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "destination": ("IMAGE",), + "source": ("IMAGE",), + "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "resize_source": ("BOOLEAN", {"default": False}), + }, + "optional": { + "mask": ("MASK",), + } + } + RETURN_TYPES = ("IMAGE",) + FUNCTION = "composite" + + CATEGORY = "image" + + def composite(self, destination, source, x, y, resize_source, mask = None): + destination = destination.clone().movedim(-1, 1) + output = composite(destination, source.movedim(-1, 1), x, y, mask, 1, resize_source).movedim(1, -1) + return (output,) + +class MaskToImage: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "mask_to_image" + + def mask_to_image(self, mask): + result = mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])).movedim(1, -1).expand(-1, -1, -1, 3) + return (result,) + +class ImageToMask: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "channel": (["red", "green", "blue", "alpha"],), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + FUNCTION = "image_to_mask" + + def image_to_mask(self, image, channel): + channels = ["red", "green", "blue", "alpha"] + mask = image[:, :, :, channels.index(channel)] + return (mask,) + +class ImageColorToMask: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "color": ("INT", {"default": 0, "min": 0, "max": 0xFFFFFF, "step": 1, "display": "color"}), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + FUNCTION = "image_to_mask" + + def image_to_mask(self, image, color): + temp = (torch.clamp(image, 0, 1.0) * 255.0).round().to(torch.int) + temp = torch.bitwise_left_shift(temp[:,:,:,0], 16) + torch.bitwise_left_shift(temp[:,:,:,1], 8) + temp[:,:,:,2] + mask = torch.where(temp == color, 255, 0).float() + return (mask,) + +class SolidMask: + @classmethod + def 
INPUT_TYPES(cls): + return { + "required": { + "value": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "width": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), + "height": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + + FUNCTION = "solid" + + def solid(self, value, width, height): + out = torch.full((1, height, width), value, dtype=torch.float32, device="cpu") + return (out,) + +class InvertMask: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "mask": ("MASK",), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + + FUNCTION = "invert" + + def invert(self, mask): + out = 1.0 - mask + return (out,) + +class CropMask: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "mask": ("MASK",), + "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "width": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), + "height": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + + FUNCTION = "crop" + + def crop(self, mask, x, y, width, height): + mask = mask.reshape((-1, mask.shape[-2], mask.shape[-1])) + out = mask[:, y:y + height, x:x + width] + return (out,) + +class MaskComposite: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "destination": ("MASK",), + "source": ("MASK",), + "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "operation": (["multiply", "add", "subtract", "and", "or", "xor"],), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + + FUNCTION = "combine" + + def combine(self, destination, source, x, y, operation): + output = destination.reshape((-1, destination.shape[-2], destination.shape[-1])).clone() + source = source.reshape((-1, source.shape[-2], source.shape[-1])) + + left, top = (x, y,) + right, bottom = (min(left + source.shape[-1], destination.shape[-1]), min(top + source.shape[-2], destination.shape[-2])) + visible_width, visible_height = (right - left, bottom - top,) + + source_portion = source[:, :visible_height, :visible_width] + destination_portion = destination[:, top:bottom, left:right] + + if operation == "multiply": + output[:, top:bottom, left:right] = destination_portion * source_portion + elif operation == "add": + output[:, top:bottom, left:right] = destination_portion + source_portion + elif operation == "subtract": + output[:, top:bottom, left:right] = destination_portion - source_portion + elif operation == "and": + output[:, top:bottom, left:right] = torch.bitwise_and(destination_portion.round().bool(), source_portion.round().bool()).float() + elif operation == "or": + output[:, top:bottom, left:right] = torch.bitwise_or(destination_portion.round().bool(), source_portion.round().bool()).float() + elif operation == "xor": + output[:, top:bottom, left:right] = torch.bitwise_xor(destination_portion.round().bool(), source_portion.round().bool()).float() + + output = torch.clamp(output, 0.0, 1.0) + + return (output,) + +class FeatherMask: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "mask": ("MASK",), + "left": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "top": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 
1}), + "right": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "bottom": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + + FUNCTION = "feather" + + def feather(self, mask, left, top, right, bottom): + output = mask.reshape((-1, mask.shape[-2], mask.shape[-1])).clone() + + left = min(left, output.shape[-1]) + right = min(right, output.shape[-1]) + top = min(top, output.shape[-2]) + bottom = min(bottom, output.shape[-2]) + + for x in range(left): + feather_rate = (x + 1.0) / left + output[:, :, x] *= feather_rate + + for x in range(right): + feather_rate = (x + 1) / right + output[:, :, -x] *= feather_rate + + for y in range(top): + feather_rate = (y + 1) / top + output[:, y, :] *= feather_rate + + for y in range(bottom): + feather_rate = (y + 1) / bottom + output[:, -y, :] *= feather_rate + + return (output,) + +class GrowMask: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "mask": ("MASK",), + "expand": ("INT", {"default": 0, "min": -MAX_RESOLUTION, "max": MAX_RESOLUTION, "step": 1}), + "tapered_corners": ("BOOLEAN", {"default": True}), + }, + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + + FUNCTION = "expand_mask" + + def expand_mask(self, mask, expand, tapered_corners): + c = 0 if tapered_corners else 1 + kernel = np.array([[c, 1, c], + [1, 1, 1], + [c, 1, c]]) + mask = mask.reshape((-1, mask.shape[-2], mask.shape[-1])) + out = [] + for m in mask: + output = m.numpy() + for _ in range(abs(expand)): + if expand < 0: + output = scipy.ndimage.grey_erosion(output, footprint=kernel) + else: + output = scipy.ndimage.grey_dilation(output, footprint=kernel) + output = torch.from_numpy(output) + out.append(output) + return (torch.stack(out, dim=0),) + + + +NODE_CLASS_MAPPINGS = { + "LatentCompositeMasked": LatentCompositeMasked, + "ImageCompositeMasked": ImageCompositeMasked, + "MaskToImage": MaskToImage, + "ImageToMask": ImageToMask, + "ImageColorToMask": ImageColorToMask, + "SolidMask": SolidMask, + "InvertMask": InvertMask, + "CropMask": CropMask, + "MaskComposite": MaskComposite, + "FeatherMask": FeatherMask, + "GrowMask": GrowMask, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "ImageToMask": "Convert Image to Mask", + "MaskToImage": "Convert Mask to Image", +} diff --git a/ldm_patched/contrib/external_model_advanced.py b/ldm_patched/contrib/external_model_advanced.py new file mode 100644 index 0000000000000000000000000000000000000000..03a2f0454c74885371e7d9cc843c53a6f8bbb830 --- /dev/null +++ b/ldm_patched/contrib/external_model_advanced.py @@ -0,0 +1,177 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import ldm_patched.utils.path_utils +import ldm_patched.modules.sd +import ldm_patched.modules.model_sampling +import torch + +class LCM(ldm_patched.modules.model_sampling.EPS): + def calculate_denoised(self, sigma, model_output, model_input): + timestep = self.timestep(sigma).view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) + sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) + x0 = model_input - model_output * sigma + + sigma_data = 0.5 + scaled_timestep = timestep * 10.0 #timestep_scaling + + c_skip = sigma_data**2 / (scaled_timestep**2 + sigma_data**2) + c_out = scaled_timestep / (scaled_timestep**2 + sigma_data**2) ** 0.5 + + return c_out * x0 + c_skip * model_input + +class ModelSamplingDiscreteDistilled(ldm_patched.modules.model_sampling.ModelSamplingDiscrete): + original_timesteps = 50 + + def __init__(self, 
model_config=None): + super().__init__(model_config) + + self.skip_steps = self.num_timesteps // self.original_timesteps + + sigmas_valid = torch.zeros((self.original_timesteps), dtype=torch.float32) + for x in range(self.original_timesteps): + sigmas_valid[self.original_timesteps - 1 - x] = self.sigmas[self.num_timesteps - 1 - x * self.skip_steps] + + self.set_sigmas(sigmas_valid) + + def timestep(self, sigma): + log_sigma = sigma.log() + dists = log_sigma.to(self.log_sigmas.device) - self.log_sigmas[:, None] + return (dists.abs().argmin(dim=0).view(sigma.shape) * self.skip_steps + (self.skip_steps - 1)).to(sigma.device) + + def sigma(self, timestep): + t = torch.clamp(((timestep.float().to(self.log_sigmas.device) - (self.skip_steps - 1)) / self.skip_steps).float(), min=0, max=(len(self.sigmas) - 1)) + low_idx = t.floor().long() + high_idx = t.ceil().long() + w = t.frac() + log_sigma = (1 - w) * self.log_sigmas[low_idx] + w * self.log_sigmas[high_idx] + return log_sigma.exp().to(timestep.device) + + +def rescale_zero_terminal_snr_sigmas(sigmas): + alphas_cumprod = 1 / ((sigmas * sigmas) + 1) + alphas_bar_sqrt = alphas_cumprod.sqrt() + + # Store old values. + alphas_bar_sqrt_0 = alphas_bar_sqrt[0].clone() + alphas_bar_sqrt_T = alphas_bar_sqrt[-1].clone() + + # Shift so the last timestep is zero. + alphas_bar_sqrt -= (alphas_bar_sqrt_T) + + # Scale so the first timestep is back to the old value. + alphas_bar_sqrt *= alphas_bar_sqrt_0 / (alphas_bar_sqrt_0 - alphas_bar_sqrt_T) + + # Convert alphas_bar_sqrt to betas + alphas_bar = alphas_bar_sqrt**2 # Revert sqrt + alphas_bar[-1] = 4.8973451890853435e-08 + return ((1 - alphas_bar) / alphas_bar) ** 0.5 + +class ModelSamplingDiscrete: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "sampling": (["eps", "v_prediction", "lcm"],), + "zsnr": ("BOOLEAN", {"default": False}), + }} + + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "advanced/model" + + def patch(self, model, sampling, zsnr): + m = model.clone() + + sampling_base = ldm_patched.modules.model_sampling.ModelSamplingDiscrete + if sampling == "eps": + sampling_type = ldm_patched.modules.model_sampling.EPS + elif sampling == "v_prediction": + sampling_type = ldm_patched.modules.model_sampling.V_PREDICTION + elif sampling == "lcm": + sampling_type = LCM + sampling_base = ModelSamplingDiscreteDistilled + + class ModelSamplingAdvanced(sampling_base, sampling_type): + pass + + model_sampling = ModelSamplingAdvanced(model.model.model_config) + if zsnr: + model_sampling.set_sigmas(rescale_zero_terminal_snr_sigmas(model_sampling.sigmas)) + + m.add_object_patch("model_sampling", model_sampling) + return (m, ) + +class ModelSamplingContinuousEDM: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "sampling": (["v_prediction", "eps"],), + "sigma_max": ("FLOAT", {"default": 120.0, "min": 0.0, "max": 1000.0, "step":0.001, "round": False}), + "sigma_min": ("FLOAT", {"default": 0.002, "min": 0.0, "max": 1000.0, "step":0.001, "round": False}), + }} + + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "advanced/model" + + def patch(self, model, sampling, sigma_max, sigma_min): + m = model.clone() + + if sampling == "eps": + sampling_type = ldm_patched.modules.model_sampling.EPS + elif sampling == "v_prediction": + sampling_type = ldm_patched.modules.model_sampling.V_PREDICTION + + class ModelSamplingAdvanced(ldm_patched.modules.model_sampling.ModelSamplingContinuousEDM, sampling_type): + pass + + model_sampling = 
ModelSamplingAdvanced(model.model.model_config) + model_sampling.set_sigma_range(sigma_min, sigma_max) + m.add_object_patch("model_sampling", model_sampling) + return (m, ) + +class RescaleCFG: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "multiplier": ("FLOAT", {"default": 0.7, "min": 0.0, "max": 1.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "advanced/model" + + def patch(self, model, multiplier): + def rescale_cfg(args): + cond = args["cond"] + uncond = args["uncond"] + cond_scale = args["cond_scale"] + sigma = args["sigma"] + sigma = sigma.view(sigma.shape[:1] + (1,) * (cond.ndim - 1)) + x_orig = args["input"] + + #rescale cfg has to be done on v-pred model output + x = x_orig / (sigma * sigma + 1.0) + cond = ((x - (x_orig - cond)) * (sigma ** 2 + 1.0) ** 0.5) / (sigma) + uncond = ((x - (x_orig - uncond)) * (sigma ** 2 + 1.0) ** 0.5) / (sigma) + + #rescalecfg + x_cfg = uncond + cond_scale * (cond - uncond) + ro_pos = torch.std(cond, dim=(1,2,3), keepdim=True) + ro_cfg = torch.std(x_cfg, dim=(1,2,3), keepdim=True) + + x_rescaled = x_cfg * (ro_pos / ro_cfg) + x_final = multiplier * x_rescaled + (1.0 - multiplier) * x_cfg + + return x_orig - (x - x_final * sigma / (sigma * sigma + 1.0) ** 0.5) + + m = model.clone() + m.set_model_sampler_cfg_function(rescale_cfg) + return (m, ) + +NODE_CLASS_MAPPINGS = { + "ModelSamplingDiscrete": ModelSamplingDiscrete, + "ModelSamplingContinuousEDM": ModelSamplingContinuousEDM, + "RescaleCFG": RescaleCFG, +} diff --git a/ldm_patched/contrib/external_model_downscale.py b/ldm_patched/contrib/external_model_downscale.py new file mode 100644 index 0000000000000000000000000000000000000000..4f1da54de25425dea2c5eeabe10c2342cc1e3739 --- /dev/null +++ b/ldm_patched/contrib/external_model_downscale.py @@ -0,0 +1,55 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import torch +import ldm_patched.modules.utils + +class PatchModelAddDownscale: + upscale_methods = ["bicubic", "nearest-exact", "bilinear", "area", "bislerp"] + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "block_number": ("INT", {"default": 3, "min": 1, "max": 32, "step": 1}), + "downscale_factor": ("FLOAT", {"default": 2.0, "min": 0.1, "max": 9.0, "step": 0.001}), + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_percent": ("FLOAT", {"default": 0.35, "min": 0.0, "max": 1.0, "step": 0.001}), + "downscale_after_skip": ("BOOLEAN", {"default": True}), + "downscale_method": (s.upscale_methods,), + "upscale_method": (s.upscale_methods,), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "_for_testing" + + def patch(self, model, block_number, downscale_factor, start_percent, end_percent, downscale_after_skip, downscale_method, upscale_method): + sigma_start = model.model.model_sampling.percent_to_sigma(start_percent) + sigma_end = model.model.model_sampling.percent_to_sigma(end_percent) + + def input_block_patch(h, transformer_options): + if transformer_options["block"][1] == block_number: + sigma = transformer_options["sigmas"][0].item() + if sigma <= sigma_start and sigma >= sigma_end: + h = ldm_patched.modules.utils.common_upscale(h, round(h.shape[-1] * (1.0 / downscale_factor)), round(h.shape[-2] * (1.0 / downscale_factor)), downscale_method, "disabled") + return h + + def output_block_patch(h, hsp, transformer_options): + if h.shape[2] != hsp.shape[2]: + h = ldm_patched.modules.utils.common_upscale(h, 
hsp.shape[-1], hsp.shape[-2], upscale_method, "disabled") + return h, hsp + + m = model.clone() + if downscale_after_skip: + m.set_model_input_block_patch_after_skip(input_block_patch) + else: + m.set_model_input_block_patch(input_block_patch) + m.set_model_output_block_patch(output_block_patch) + return (m, ) + +NODE_CLASS_MAPPINGS = { + "PatchModelAddDownscale": PatchModelAddDownscale, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + # Sampling + "PatchModelAddDownscale": "PatchModelAddDownscale (Kohya Deep Shrink)", +} diff --git a/ldm_patched/contrib/external_model_merging.py b/ldm_patched/contrib/external_model_merging.py new file mode 100644 index 0000000000000000000000000000000000000000..ae8145d4f698f5286d6f69962a25d06d78203614 --- /dev/null +++ b/ldm_patched/contrib/external_model_merging.py @@ -0,0 +1,286 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import ldm_patched.modules.sd +import ldm_patched.modules.utils +import ldm_patched.modules.model_base +import ldm_patched.modules.model_management + +import ldm_patched.utils.path_utils +import json +import os + +from ldm_patched.modules.args_parser import args + +class ModelMergeSimple: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model1": ("MODEL",), + "model2": ("MODEL",), + "ratio": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "merge" + + CATEGORY = "advanced/model_merging" + + def merge(self, model1, model2, ratio): + m = model1.clone() + kp = model2.get_key_patches("diffusion_model.") + for k in kp: + m.add_patches({k: kp[k]}, 1.0 - ratio, ratio) + return (m, ) + +class ModelSubtract: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model1": ("MODEL",), + "model2": ("MODEL",), + "multiplier": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "merge" + + CATEGORY = "advanced/model_merging" + + def merge(self, model1, model2, multiplier): + m = model1.clone() + kp = model2.get_key_patches("diffusion_model.") + for k in kp: + m.add_patches({k: kp[k]}, - multiplier, multiplier) + return (m, ) + +class ModelAdd: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model1": ("MODEL",), + "model2": ("MODEL",), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "merge" + + CATEGORY = "advanced/model_merging" + + def merge(self, model1, model2): + m = model1.clone() + kp = model2.get_key_patches("diffusion_model.") + for k in kp: + m.add_patches({k: kp[k]}, 1.0, 1.0) + return (m, ) + + +class CLIPMergeSimple: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip1": ("CLIP",), + "clip2": ("CLIP",), + "ratio": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + }} + RETURN_TYPES = ("CLIP",) + FUNCTION = "merge" + + CATEGORY = "advanced/model_merging" + + def merge(self, clip1, clip2, ratio): + m = clip1.clone() + kp = clip2.get_key_patches() + for k in kp: + if k.endswith(".position_ids") or k.endswith(".logit_scale"): + continue + m.add_patches({k: kp[k]}, 1.0 - ratio, ratio) + return (m, ) + +class ModelMergeBlocks: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model1": ("MODEL",), + "model2": ("MODEL",), + "input": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "middle": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "out": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}) + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "merge" + + CATEGORY = 
"advanced/model_merging" + + def merge(self, model1, model2, **kwargs): + m = model1.clone() + kp = model2.get_key_patches("diffusion_model.") + default_ratio = next(iter(kwargs.values())) + + for k in kp: + ratio = default_ratio + k_unet = k[len("diffusion_model."):] + + last_arg_size = 0 + for arg in kwargs: + if k_unet.startswith(arg) and last_arg_size < len(arg): + ratio = kwargs[arg] + last_arg_size = len(arg) + + m.add_patches({k: kp[k]}, 1.0 - ratio, ratio) + return (m, ) + +def save_checkpoint(model, clip=None, vae=None, clip_vision=None, filename_prefix=None, output_dir=None, prompt=None, extra_pnginfo=None): + full_output_folder, filename, counter, subfolder, filename_prefix = ldm_patched.utils.path_utils.get_save_image_path(filename_prefix, output_dir) + prompt_info = "" + if prompt is not None: + prompt_info = json.dumps(prompt) + + metadata = {} + + enable_modelspec = True + if isinstance(model.model, ldm_patched.modules.model_base.SDXL): + metadata["modelspec.architecture"] = "stable-diffusion-xl-v1-base" + elif isinstance(model.model, ldm_patched.modules.model_base.SDXLRefiner): + metadata["modelspec.architecture"] = "stable-diffusion-xl-v1-refiner" + else: + enable_modelspec = False + + if enable_modelspec: + metadata["modelspec.sai_model_spec"] = "1.0.0" + metadata["modelspec.implementation"] = "sgm" + metadata["modelspec.title"] = "{} {}".format(filename, counter) + + #TODO: + # "stable-diffusion-v1", "stable-diffusion-v1-inpainting", "stable-diffusion-v2-512", + # "stable-diffusion-v2-768-v", "stable-diffusion-v2-unclip-l", "stable-diffusion-v2-unclip-h", + # "v2-inpainting" + + if model.model.model_type == ldm_patched.modules.model_base.ModelType.EPS: + metadata["modelspec.predict_key"] = "epsilon" + elif model.model.model_type == ldm_patched.modules.model_base.ModelType.V_PREDICTION: + metadata["modelspec.predict_key"] = "v" + + if not args.disable_server_info: + metadata["prompt"] = prompt_info + if extra_pnginfo is not None: + for x in extra_pnginfo: + metadata[x] = json.dumps(extra_pnginfo[x]) + + output_checkpoint = f"{filename}_{counter:05}_.safetensors" + output_checkpoint = os.path.join(full_output_folder, output_checkpoint) + + ldm_patched.modules.sd.save_checkpoint(output_checkpoint, model, clip, vae, clip_vision, metadata=metadata) + +class CheckpointSave: + def __init__(self): + self.output_dir = ldm_patched.utils.path_utils.get_output_directory() + + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "clip": ("CLIP",), + "vae": ("VAE",), + "filename_prefix": ("STRING", {"default": "checkpoints/ldm_patched"}),}, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},} + RETURN_TYPES = () + FUNCTION = "save" + OUTPUT_NODE = True + + CATEGORY = "advanced/model_merging" + + def save(self, model, clip, vae, filename_prefix, prompt=None, extra_pnginfo=None): + save_checkpoint(model, clip=clip, vae=vae, filename_prefix=filename_prefix, output_dir=self.output_dir, prompt=prompt, extra_pnginfo=extra_pnginfo) + return {} + +class CLIPSave: + def __init__(self): + self.output_dir = ldm_patched.utils.path_utils.get_output_directory() + + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip": ("CLIP",), + "filename_prefix": ("STRING", {"default": "clip/ldm_patched"}),}, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},} + RETURN_TYPES = () + FUNCTION = "save" + OUTPUT_NODE = True + + CATEGORY = "advanced/model_merging" + + def save(self, clip, filename_prefix, prompt=None, extra_pnginfo=None): + 
prompt_info = "" + if prompt is not None: + prompt_info = json.dumps(prompt) + + metadata = {} + if not args.disable_server_info: + metadata["prompt"] = prompt_info + if extra_pnginfo is not None: + for x in extra_pnginfo: + metadata[x] = json.dumps(extra_pnginfo[x]) + + ldm_patched.modules.model_management.load_models_gpu([clip.load_model()]) + clip_sd = clip.get_sd() + + for prefix in ["clip_l.", "clip_g.", ""]: + k = list(filter(lambda a: a.startswith(prefix), clip_sd.keys())) + current_clip_sd = {} + for x in k: + current_clip_sd[x] = clip_sd.pop(x) + if len(current_clip_sd) == 0: + continue + + p = prefix[:-1] + replace_prefix = {} + filename_prefix_ = filename_prefix + if len(p) > 0: + filename_prefix_ = "{}_{}".format(filename_prefix_, p) + replace_prefix[prefix] = "" + replace_prefix["transformer."] = "" + + full_output_folder, filename, counter, subfolder, filename_prefix_ = ldm_patched.utils.path_utils.get_save_image_path(filename_prefix_, self.output_dir) + + output_checkpoint = f"{filename}_{counter:05}_.safetensors" + output_checkpoint = os.path.join(full_output_folder, output_checkpoint) + + current_clip_sd = ldm_patched.modules.utils.state_dict_prefix_replace(current_clip_sd, replace_prefix) + + ldm_patched.modules.utils.save_torch_file(current_clip_sd, output_checkpoint, metadata=metadata) + return {} + +class VAESave: + def __init__(self): + self.output_dir = ldm_patched.utils.path_utils.get_output_directory() + + @classmethod + def INPUT_TYPES(s): + return {"required": { "vae": ("VAE",), + "filename_prefix": ("STRING", {"default": "vae/ldm_patched_vae"}),}, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},} + RETURN_TYPES = () + FUNCTION = "save" + OUTPUT_NODE = True + + CATEGORY = "advanced/model_merging" + + def save(self, vae, filename_prefix, prompt=None, extra_pnginfo=None): + full_output_folder, filename, counter, subfolder, filename_prefix = ldm_patched.utils.path_utils.get_save_image_path(filename_prefix, self.output_dir) + prompt_info = "" + if prompt is not None: + prompt_info = json.dumps(prompt) + + metadata = {} + if not args.disable_server_info: + metadata["prompt"] = prompt_info + if extra_pnginfo is not None: + for x in extra_pnginfo: + metadata[x] = json.dumps(extra_pnginfo[x]) + + output_checkpoint = f"{filename}_{counter:05}_.safetensors" + output_checkpoint = os.path.join(full_output_folder, output_checkpoint) + + ldm_patched.modules.utils.save_torch_file(vae.get_sd(), output_checkpoint, metadata=metadata) + return {} + +NODE_CLASS_MAPPINGS = { + "ModelMergeSimple": ModelMergeSimple, + "ModelMergeBlocks": ModelMergeBlocks, + "ModelMergeSubtract": ModelSubtract, + "ModelMergeAdd": ModelAdd, + "CheckpointSave": CheckpointSave, + "CLIPMergeSimple": CLIPMergeSimple, + "CLIPSave": CLIPSave, + "VAESave": VAESave, +} diff --git a/ldm_patched/contrib/external_perpneg.py b/ldm_patched/contrib/external_perpneg.py new file mode 100644 index 0000000000000000000000000000000000000000..ec91681fea1261767e9c2ae68e4c8a4bb6489be6 --- /dev/null +++ b/ldm_patched/contrib/external_perpneg.py @@ -0,0 +1,57 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import torch +import ldm_patched.modules.model_management +import ldm_patched.modules.sample +import ldm_patched.modules.samplers +import ldm_patched.modules.utils + + +class PerpNeg: + @classmethod + def INPUT_TYPES(s): + return {"required": {"model": ("MODEL", ), + "empty_conditioning": ("CONDITIONING", ), + "neg_scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0}), + }} + 
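+ # Perp-Neg guidance: instead of plain CFG against the negative prompt, the positive delta
+ # (cond - empty) has its projection onto the negative delta (uncond - empty) subtracted,
+ # scaled by neg_scale, so the negative prompt only cancels the part of the guidance it
+ # actually overlaps with (see cfg_function below)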
RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "_for_testing" + + def patch(self, model, empty_conditioning, neg_scale): + m = model.clone() + nocond = ldm_patched.modules.sample.convert_cond(empty_conditioning) + + def cfg_function(args): + model = args["model"] + noise_pred_pos = args["cond_denoised"] + noise_pred_neg = args["uncond_denoised"] + cond_scale = args["cond_scale"] + x = args["input"] + sigma = args["sigma"] + model_options = args["model_options"] + nocond_processed = ldm_patched.modules.samplers.encode_model_conds(model.extra_conds, nocond, x, x.device, "negative") + + (noise_pred_nocond, _) = ldm_patched.modules.samplers.calc_cond_uncond_batch(model, nocond_processed, None, x, sigma, model_options) + + pos = noise_pred_pos - noise_pred_nocond + neg = noise_pred_neg - noise_pred_nocond + perp = ((torch.mul(pos, neg).sum())/(torch.norm(neg)**2)) * neg + perp_neg = perp * neg_scale + cfg_result = noise_pred_nocond + cond_scale*(pos - perp_neg) + cfg_result = x - cfg_result + return cfg_result + + m.set_model_sampler_cfg_function(cfg_function) + + return (m, ) + + +NODE_CLASS_MAPPINGS = { + "PerpNeg": PerpNeg, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "PerpNeg": "Perp-Neg", +} diff --git a/ldm_patched/contrib/external_photomaker.py b/ldm_patched/contrib/external_photomaker.py new file mode 100644 index 0000000000000000000000000000000000000000..cc7f67100670d6407531d56f232517a9216e9dcf --- /dev/null +++ b/ldm_patched/contrib/external_photomaker.py @@ -0,0 +1,189 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import torch +import torch.nn as nn +import ldm_patched.utils.path_utils +import ldm_patched.modules.clip_model +import ldm_patched.modules.clip_vision +import ldm_patched.modules.ops + +# code for model from: https://github.com/TencentARC/PhotoMaker/blob/main/photomaker/model.py under Apache License Version 2.0 +VISION_CONFIG_DICT = { + "hidden_size": 1024, + "image_size": 224, + "intermediate_size": 4096, + "num_attention_heads": 16, + "num_channels": 3, + "num_hidden_layers": 24, + "patch_size": 14, + "projection_dim": 768, + "hidden_act": "quick_gelu", +} + +class MLP(nn.Module): + def __init__(self, in_dim, out_dim, hidden_dim, use_residual=True, operations=ldm_patched.modules.ops): + super().__init__() + if use_residual: + assert in_dim == out_dim + self.layernorm = operations.LayerNorm(in_dim) + self.fc1 = operations.Linear(in_dim, hidden_dim) + self.fc2 = operations.Linear(hidden_dim, out_dim) + self.use_residual = use_residual + self.act_fn = nn.GELU() + + def forward(self, x): + residual = x + x = self.layernorm(x) + x = self.fc1(x) + x = self.act_fn(x) + x = self.fc2(x) + if self.use_residual: + x = x + residual + return x + + +class FuseModule(nn.Module): + def __init__(self, embed_dim, operations): + super().__init__() + self.mlp1 = MLP(embed_dim * 2, embed_dim, embed_dim, use_residual=False, operations=operations) + self.mlp2 = MLP(embed_dim, embed_dim, embed_dim, use_residual=True, operations=operations) + self.layer_norm = operations.LayerNorm(embed_dim) + + def fuse_fn(self, prompt_embeds, id_embeds): + stacked_id_embeds = torch.cat([prompt_embeds, id_embeds], dim=-1) + stacked_id_embeds = self.mlp1(stacked_id_embeds) + prompt_embeds + stacked_id_embeds = self.mlp2(stacked_id_embeds) + stacked_id_embeds = self.layer_norm(stacked_id_embeds) + return stacked_id_embeds + + def forward( + self, + prompt_embeds, + id_embeds, + class_tokens_mask, + ) -> torch.Tensor: + # id_embeds shape: [b, max_num_inputs, 1, 2048] + id_embeds 
= id_embeds.to(prompt_embeds.dtype) + num_inputs = class_tokens_mask.sum().unsqueeze(0) # TODO: check for training case + batch_size, max_num_inputs = id_embeds.shape[:2] + # seq_length: 77 + seq_length = prompt_embeds.shape[1] + # flat_id_embeds shape: [b*max_num_inputs, 1, 2048] + flat_id_embeds = id_embeds.view( + -1, id_embeds.shape[-2], id_embeds.shape[-1] + ) + # valid_id_mask [b*max_num_inputs] + valid_id_mask = ( + torch.arange(max_num_inputs, device=flat_id_embeds.device)[None, :] + < num_inputs[:, None] + ) + valid_id_embeds = flat_id_embeds[valid_id_mask.flatten()] + + prompt_embeds = prompt_embeds.view(-1, prompt_embeds.shape[-1]) + class_tokens_mask = class_tokens_mask.view(-1) + valid_id_embeds = valid_id_embeds.view(-1, valid_id_embeds.shape[-1]) + # slice out the image token embeddings + image_token_embeds = prompt_embeds[class_tokens_mask] + stacked_id_embeds = self.fuse_fn(image_token_embeds, valid_id_embeds) + assert class_tokens_mask.sum() == stacked_id_embeds.shape[0], f"{class_tokens_mask.sum()} != {stacked_id_embeds.shape[0]}" + prompt_embeds.masked_scatter_(class_tokens_mask[:, None], stacked_id_embeds.to(prompt_embeds.dtype)) + updated_prompt_embeds = prompt_embeds.view(batch_size, seq_length, -1) + return updated_prompt_embeds + +class PhotoMakerIDEncoder(ldm_patched.modules.clip_model.CLIPVisionModelProjection): + def __init__(self): + self.load_device = ldm_patched.modules.model_management.text_encoder_device() + offload_device = ldm_patched.modules.model_management.text_encoder_offload_device() + dtype = ldm_patched.modules.model_management.text_encoder_dtype(self.load_device) + + super().__init__(VISION_CONFIG_DICT, dtype, offload_device, ldm_patched.modules.ops.manual_cast) + self.visual_projection_2 = ldm_patched.modules.ops.manual_cast.Linear(1024, 1280, bias=False) + self.fuse_module = FuseModule(2048, ldm_patched.modules.ops.manual_cast) + + def forward(self, id_pixel_values, prompt_embeds, class_tokens_mask): + b, num_inputs, c, h, w = id_pixel_values.shape + id_pixel_values = id_pixel_values.view(b * num_inputs, c, h, w) + + shared_id_embeds = self.vision_model(id_pixel_values)[2] + id_embeds = self.visual_projection(shared_id_embeds) + id_embeds_2 = self.visual_projection_2(shared_id_embeds) + + id_embeds = id_embeds.view(b, num_inputs, 1, -1) + id_embeds_2 = id_embeds_2.view(b, num_inputs, 1, -1) + + id_embeds = torch.cat((id_embeds, id_embeds_2), dim=-1) + updated_prompt_embeds = self.fuse_module(prompt_embeds, id_embeds, class_tokens_mask) + + return updated_prompt_embeds + + +class PhotoMakerLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "photomaker_model_name": (ldm_patched.utils.path_utils.get_filename_list("photomaker"), )}} + + RETURN_TYPES = ("PHOTOMAKER",) + FUNCTION = "load_photomaker_model" + + CATEGORY = "_for_testing/photomaker" + + def load_photomaker_model(self, photomaker_model_name): + photomaker_model_path = ldm_patched.utils.path_utils.get_full_path("photomaker", photomaker_model_name) + photomaker_model = PhotoMakerIDEncoder() + data = ldm_patched.modules.utils.load_torch_file(photomaker_model_path, safe_load=True) + if "id_encoder" in data: + data = data["id_encoder"] + photomaker_model.load_state_dict(data) + return (photomaker_model,) + + +class PhotoMakerEncode: + @classmethod + def INPUT_TYPES(s): + return {"required": { "photomaker": ("PHOTOMAKER",), + "image": ("IMAGE",), + "clip": ("CLIP", ), + "text": ("STRING", {"multiline": True, "default": "photograph of photomaker"}), + }} + + RETURN_TYPES = 
("CONDITIONING",) + FUNCTION = "apply_photomaker" + + CATEGORY = "_for_testing/photomaker" + + def apply_photomaker(self, photomaker, image, clip, text): + special_token = "photomaker" + pixel_values = ldm_patched.modules.clip_vision.clip_preprocess(image.to(photomaker.load_device)).float() + try: + index = text.split(" ").index(special_token) + 1 + except ValueError: + index = -1 + tokens = clip.tokenize(text, return_word_ids=True) + out_tokens = {} + for k in tokens: + out_tokens[k] = [] + for t in tokens[k]: + f = list(filter(lambda x: x[2] != index, t)) + while len(f) < len(t): + f.append(t[-1]) + out_tokens[k].append(f) + + cond, pooled = clip.encode_from_tokens(out_tokens, return_pooled=True) + + if index > 0: + token_index = index - 1 + num_id_images = 1 + class_tokens_mask = [True if token_index <= i < token_index+num_id_images else False for i in range(77)] + out = photomaker(id_pixel_values=pixel_values.unsqueeze(0), prompt_embeds=cond.to(photomaker.load_device), + class_tokens_mask=torch.tensor(class_tokens_mask, dtype=torch.bool, device=photomaker.load_device).unsqueeze(0)) + else: + out = cond + + return ([[out, {"pooled_output": pooled}]], ) + + +NODE_CLASS_MAPPINGS = { + "PhotoMakerLoader": PhotoMakerLoader, + "PhotoMakerEncode": PhotoMakerEncode, +} + diff --git a/ldm_patched/contrib/external_post_processing.py b/ldm_patched/contrib/external_post_processing.py new file mode 100644 index 0000000000000000000000000000000000000000..93cb12122c4f05ac0c0a4ed890f6a0af27916a02 --- /dev/null +++ b/ldm_patched/contrib/external_post_processing.py @@ -0,0 +1,278 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import numpy as np +import torch +import torch.nn.functional as F +from PIL import Image +import math + +import ldm_patched.modules.utils + + +class Blend: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image1": ("IMAGE",), + "image2": ("IMAGE",), + "blend_factor": ("FLOAT", { + "default": 0.5, + "min": 0.0, + "max": 1.0, + "step": 0.01 + }), + "blend_mode": (["normal", "multiply", "screen", "overlay", "soft_light", "difference"],), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "blend_images" + + CATEGORY = "image/postprocessing" + + def blend_images(self, image1: torch.Tensor, image2: torch.Tensor, blend_factor: float, blend_mode: str): + image2 = image2.to(image1.device) + if image1.shape != image2.shape: + image2 = image2.permute(0, 3, 1, 2) + image2 = ldm_patched.modules.utils.common_upscale(image2, image1.shape[2], image1.shape[1], upscale_method='bicubic', crop='center') + image2 = image2.permute(0, 2, 3, 1) + + blended_image = self.blend_mode(image1, image2, blend_mode) + blended_image = image1 * (1 - blend_factor) + blended_image * blend_factor + blended_image = torch.clamp(blended_image, 0, 1) + return (blended_image,) + + def blend_mode(self, img1, img2, mode): + if mode == "normal": + return img2 + elif mode == "multiply": + return img1 * img2 + elif mode == "screen": + return 1 - (1 - img1) * (1 - img2) + elif mode == "overlay": + return torch.where(img1 <= 0.5, 2 * img1 * img2, 1 - 2 * (1 - img1) * (1 - img2)) + elif mode == "soft_light": + return torch.where(img2 <= 0.5, img1 - (1 - 2 * img2) * img1 * (1 - img1), img1 + (2 * img2 - 1) * (self.g(img1) - img1)) + elif mode == "difference": + return img1 - img2 + else: + raise ValueError(f"Unsupported blend mode: {mode}") + + def g(self, x): + return torch.where(x <= 0.25, ((16 * x - 12) * x + 4) * x, torch.sqrt(x)) + +def 
gaussian_kernel(kernel_size: int, sigma: float, device=None): + x, y = torch.meshgrid(torch.linspace(-1, 1, kernel_size, device=device), torch.linspace(-1, 1, kernel_size, device=device), indexing="ij") + d = torch.sqrt(x * x + y * y) + g = torch.exp(-(d * d) / (2.0 * sigma * sigma)) + return g / g.sum() + +class Blur: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "blur_radius": ("INT", { + "default": 1, + "min": 1, + "max": 31, + "step": 1 + }), + "sigma": ("FLOAT", { + "default": 1.0, + "min": 0.1, + "max": 10.0, + "step": 0.1 + }), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "blur" + + CATEGORY = "image/postprocessing" + + def blur(self, image: torch.Tensor, blur_radius: int, sigma: float): + if blur_radius == 0: + return (image,) + + batch_size, height, width, channels = image.shape + + kernel_size = blur_radius * 2 + 1 + kernel = gaussian_kernel(kernel_size, sigma, device=image.device).repeat(channels, 1, 1).unsqueeze(1) + + image = image.permute(0, 3, 1, 2) # Torch wants (B, C, H, W) we use (B, H, W, C) + padded_image = F.pad(image, (blur_radius,blur_radius,blur_radius,blur_radius), 'reflect') + blurred = F.conv2d(padded_image, kernel, padding=kernel_size // 2, groups=channels)[:,:,blur_radius:-blur_radius, blur_radius:-blur_radius] + blurred = blurred.permute(0, 2, 3, 1) + + return (blurred,) + +class Quantize: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "colors": ("INT", { + "default": 256, + "min": 1, + "max": 256, + "step": 1 + }), + "dither": (["none", "floyd-steinberg", "bayer-2", "bayer-4", "bayer-8", "bayer-16"],), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "quantize" + + CATEGORY = "image/postprocessing" + + def bayer(im, pal_im, order): + def normalized_bayer_matrix(n): + if n == 0: + return np.zeros((1,1), "float32") + else: + q = 4 ** n + m = q * normalized_bayer_matrix(n - 1) + return np.bmat(((m-1.5, m+0.5), (m+1.5, m-0.5))) / q + + num_colors = len(pal_im.getpalette()) // 3 + spread = 2 * 256 / num_colors + bayer_n = int(math.log2(order)) + bayer_matrix = torch.from_numpy(spread * normalized_bayer_matrix(bayer_n) + 0.5) + + result = torch.from_numpy(np.array(im).astype(np.float32)) + tw = math.ceil(result.shape[0] / bayer_matrix.shape[0]) + th = math.ceil(result.shape[1] / bayer_matrix.shape[1]) + tiled_matrix = bayer_matrix.tile(tw, th).unsqueeze(-1) + result.add_(tiled_matrix[:result.shape[0],:result.shape[1]]).clamp_(0, 255) + result = result.to(dtype=torch.uint8) + + im = Image.fromarray(result.cpu().numpy()) + im = im.quantize(palette=pal_im, dither=Image.Dither.NONE) + return im + + def quantize(self, image: torch.Tensor, colors: int, dither: str): + batch_size, height, width, _ = image.shape + result = torch.zeros_like(image) + + for b in range(batch_size): + im = Image.fromarray((image[b] * 255).to(torch.uint8).numpy(), mode='RGB') + + pal_im = im.quantize(colors=colors) # Required as described in https://github.com/python-pillow/Pillow/issues/5836 + + if dither == "none": + quantized_image = im.quantize(palette=pal_im, dither=Image.Dither.NONE) + elif dither == "floyd-steinberg": + quantized_image = im.quantize(palette=pal_im, dither=Image.Dither.FLOYDSTEINBERG) + elif dither.startswith("bayer"): + order = int(dither.split('-')[-1]) + quantized_image = Quantize.bayer(im, pal_im, order) + + quantized_array = torch.tensor(np.array(quantized_image.convert("RGB"))).float() / 255 + result[b] = 
quantized_array + + return (result,) + +class Sharpen: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "sharpen_radius": ("INT", { + "default": 1, + "min": 1, + "max": 31, + "step": 1 + }), + "sigma": ("FLOAT", { + "default": 1.0, + "min": 0.1, + "max": 10.0, + "step": 0.1 + }), + "alpha": ("FLOAT", { + "default": 1.0, + "min": 0.0, + "max": 5.0, + "step": 0.1 + }), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "sharpen" + + CATEGORY = "image/postprocessing" + + def sharpen(self, image: torch.Tensor, sharpen_radius: int, sigma:float, alpha: float): + if sharpen_radius == 0: + return (image,) + + batch_size, height, width, channels = image.shape + + kernel_size = sharpen_radius * 2 + 1 + kernel = gaussian_kernel(kernel_size, sigma, device=image.device) * -(alpha*10) + center = kernel_size // 2 + kernel[center, center] = kernel[center, center] - kernel.sum() + 1.0 + kernel = kernel.repeat(channels, 1, 1).unsqueeze(1) + + tensor_image = image.permute(0, 3, 1, 2) # Torch wants (B, C, H, W) we use (B, H, W, C) + tensor_image = F.pad(tensor_image, (sharpen_radius,sharpen_radius,sharpen_radius,sharpen_radius), 'reflect') + sharpened = F.conv2d(tensor_image, kernel, padding=center, groups=channels)[:,:,sharpen_radius:-sharpen_radius, sharpen_radius:-sharpen_radius] + sharpened = sharpened.permute(0, 2, 3, 1) + + result = torch.clamp(sharpened, 0, 1) + + return (result,) + +class ImageScaleToTotalPixels: + upscale_methods = ["nearest-exact", "bilinear", "area", "bicubic", "lanczos"] + crop_methods = ["disabled", "center"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { "image": ("IMAGE",), "upscale_method": (s.upscale_methods,), + "megapixels": ("FLOAT", {"default": 1.0, "min": 0.01, "max": 16.0, "step": 0.01}), + }} + RETURN_TYPES = ("IMAGE",) + FUNCTION = "upscale" + + CATEGORY = "image/upscaling" + + def upscale(self, image, upscale_method, megapixels): + samples = image.movedim(-1,1) + total = int(megapixels * 1024 * 1024) + + scale_by = math.sqrt(total / (samples.shape[3] * samples.shape[2])) + width = round(samples.shape[3] * scale_by) + height = round(samples.shape[2] * scale_by) + + s = ldm_patched.modules.utils.common_upscale(samples, width, height, upscale_method, "disabled") + s = s.movedim(1,-1) + return (s,) + +NODE_CLASS_MAPPINGS = { + "ImageBlend": Blend, + "ImageBlur": Blur, + "ImageQuantize": Quantize, + "ImageSharpen": Sharpen, + "ImageScaleToTotalPixels": ImageScaleToTotalPixels, +} diff --git a/ldm_patched/contrib/external_rebatch.py b/ldm_patched/contrib/external_rebatch.py new file mode 100644 index 0000000000000000000000000000000000000000..c24cc8c32c61b349fe416be398a805d8b2fc0bfc --- /dev/null +++ b/ldm_patched/contrib/external_rebatch.py @@ -0,0 +1,140 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import torch + +class LatentRebatch: + @classmethod + def INPUT_TYPES(s): + return {"required": { "latents": ("LATENT",), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), + }} + RETURN_TYPES = ("LATENT",) + INPUT_IS_LIST = True + OUTPUT_IS_LIST = (True, ) + + FUNCTION = "rebatch" + + CATEGORY = "latent/batch" + + @staticmethod + def get_batch(latents, list_ind, offset): + '''prepare a batch out of the list of latents''' + samples = latents[list_ind]['samples'] + shape = samples.shape + mask = latents[list_ind]['noise_mask'] if 'noise_mask' in latents[list_ind] else torch.ones((shape[0], 1, shape[2]*8, shape[3]*8), device='cpu') + if mask.shape[-1] != 
shape[-1] * 8 or mask.shape[-2] != shape[-2] * 8:
+            mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(shape[-2]*8, shape[-1]*8), mode="bilinear")
+        if mask.shape[0] < samples.shape[0]:
+            mask = mask.repeat((shape[0] - 1) // mask.shape[0] + 1, 1, 1, 1)[:shape[0]]
+        if 'batch_index' in latents[list_ind]:
+            batch_inds = latents[list_ind]['batch_index']
+        else:
+            batch_inds = [x+offset for x in range(shape[0])]
+        return samples, mask, batch_inds
+
+    @staticmethod
+    def get_slices(indexable, num, batch_size):
+        '''divides an indexable object into num slices of length batch_size, and a remainder'''
+        slices = []
+        for i in range(num):
+            slices.append(indexable[i*batch_size:(i+1)*batch_size])
+        if num * batch_size < len(indexable):
+            return slices, indexable[num * batch_size:]
+        else:
+            return slices, None
+
+    @staticmethod
+    def slice_batch(batch, num, batch_size):
+        result = [LatentRebatch.get_slices(x, num, batch_size) for x in batch]
+        return list(zip(*result))
+
+    @staticmethod
+    def cat_batch(batch1, batch2):
+        if batch1[0] is None:
+            return batch2
+        result = [torch.cat((b1, b2)) if torch.is_tensor(b1) else b1 + b2 for b1, b2 in zip(batch1, batch2)]
+        return result
+
+    def rebatch(self, latents, batch_size):
+        batch_size = batch_size[0]
+
+        output_list = []
+        current_batch = (None, None, None)
+        processed = 0
+
+        for i in range(len(latents)):
+            # fetch next entry from the list
+            #samples, masks, indices = self.get_batch(latents, i)
+            next_batch = self.get_batch(latents, i, processed)
+            processed += len(next_batch[2])
+            # set to current if current is None
+            if current_batch[0] is None:
+                current_batch = next_batch
+            # add previous to list if dimensions do not match
+            elif next_batch[0].shape[-1] != current_batch[0].shape[-1] or next_batch[0].shape[-2] != current_batch[0].shape[-2]:
+                sliced, _ = self.slice_batch(current_batch, 1, batch_size)
+                output_list.append({'samples': sliced[0][0], 'noise_mask': sliced[1][0], 'batch_index': sliced[2][0]})
+                current_batch = next_batch
+            # cat if everything checks out
+            else:
+                current_batch = self.cat_batch(current_batch, next_batch)
+
+            # add to list if dimensions have gone above target batch size
+            if current_batch[0].shape[0] > batch_size:
+                num = current_batch[0].shape[0] // batch_size
+                sliced, remainder = self.slice_batch(current_batch, num, batch_size)
+
+                for i in range(num):
+                    output_list.append({'samples': sliced[0][i], 'noise_mask': sliced[1][i], 'batch_index': sliced[2][i]})
+
+                current_batch = remainder
+
+        #add remainder
+        if current_batch[0] is not None:
+            sliced, _ = self.slice_batch(current_batch, 1, batch_size)
+            output_list.append({'samples': sliced[0][0], 'noise_mask': sliced[1][0], 'batch_index': sliced[2][0]})
+
+        #get rid of empty masks
+        for s in output_list:
+            if s['noise_mask'].mean() == 1.0:
+                del s['noise_mask']
+
+        return (output_list,)
+
+class ImageRebatch:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": { "images": ("IMAGE",),
+                              "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}),
+                              }}
+    RETURN_TYPES = ("IMAGE",)
+    INPUT_IS_LIST = True
+    OUTPUT_IS_LIST = (True, )
+
+    FUNCTION = "rebatch"
+
+    CATEGORY = "image/batch"
+
+    def rebatch(self, images, batch_size):
+        batch_size = batch_size[0]
+
+        output_list = []
+        all_images = []
+        for img in images:
+            for i in range(img.shape[0]):
+                all_images.append(img[i:i+1])
+
+        for i in range(0, len(all_images), batch_size):
+            output_list.append(torch.cat(all_images[i:i+batch_size], dim=0))
+
+        return (output_list,)
+
+NODE_CLASS_MAPPINGS = 
{ + "RebatchLatents": LatentRebatch, + "RebatchImages": ImageRebatch, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "RebatchLatents": "Rebatch Latents", + "RebatchImages": "Rebatch Images", +} diff --git a/ldm_patched/contrib/external_sag.py b/ldm_patched/contrib/external_sag.py new file mode 100644 index 0000000000000000000000000000000000000000..804d56113b281e573a27859fdb4bae82e125882d --- /dev/null +++ b/ldm_patched/contrib/external_sag.py @@ -0,0 +1,172 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import torch +from torch import einsum +import torch.nn.functional as F +import math + +from einops import rearrange, repeat +import os +from ldm_patched.ldm.modules.attention import optimized_attention, _ATTN_PRECISION +import ldm_patched.modules.samplers + +# from ldm_patched.modules/ldm/modules/attention.py +# but modified to return attention scores as well as output +def attention_basic_with_sim(q, k, v, heads, mask=None): + b, _, dim_head = q.shape + dim_head //= heads + scale = dim_head ** -0.5 + + h = heads + q, k, v = map( + lambda t: t.unsqueeze(3) + .reshape(b, -1, heads, dim_head) + .permute(0, 2, 1, 3) + .reshape(b * heads, -1, dim_head) + .contiguous(), + (q, k, v), + ) + + # force cast to fp32 to avoid overflowing + if _ATTN_PRECISION =="fp32": + sim = einsum('b i d, b j d -> b i j', q.float(), k.float()) * scale + else: + sim = einsum('b i d, b j d -> b i j', q, k) * scale + + del q, k + + if mask is not None: + mask = rearrange(mask, 'b ... -> b (...)') + max_neg_value = -torch.finfo(sim.dtype).max + mask = repeat(mask, 'b j -> (b h) () j', h=h) + sim.masked_fill_(~mask, max_neg_value) + + # attention, what we cannot get enough of + sim = sim.softmax(dim=-1) + + out = einsum('b i j, b j d -> b i d', sim.to(v.dtype), v) + out = ( + out.unsqueeze(0) + .reshape(b, heads, -1, dim_head) + .permute(0, 2, 1, 3) + .reshape(b, -1, heads * dim_head) + ) + return (out, sim) + +def create_blur_map(x0, attn, sigma=3.0, threshold=1.0): + # reshape and GAP the attention map + _, hw1, hw2 = attn.shape + b, _, lh, lw = x0.shape + attn = attn.reshape(b, -1, hw1, hw2) + # Global Average Pool + mask = attn.mean(1, keepdim=False).sum(1, keepdim=False) > threshold + ratio = 2**(math.ceil(math.sqrt(lh * lw / hw1)) - 1).bit_length() + mid_shape = [math.ceil(lh / ratio), math.ceil(lw / ratio)] + + # Reshape + mask = ( + mask.reshape(b, *mid_shape) + .unsqueeze(1) + .type(attn.dtype) + ) + # Upsample + mask = F.interpolate(mask, (lh, lw)) + + blurred = gaussian_blur_2d(x0, kernel_size=9, sigma=sigma) + blurred = blurred * mask + x0 * (1 - mask) + return blurred + +def gaussian_blur_2d(img, kernel_size, sigma): + ksize_half = (kernel_size - 1) * 0.5 + + x = torch.linspace(-ksize_half, ksize_half, steps=kernel_size) + + pdf = torch.exp(-0.5 * (x / sigma).pow(2)) + + x_kernel = pdf / pdf.sum() + x_kernel = x_kernel.to(device=img.device, dtype=img.dtype) + + kernel2d = torch.mm(x_kernel[:, None], x_kernel[None, :]) + kernel2d = kernel2d.expand(img.shape[-3], 1, kernel2d.shape[0], kernel2d.shape[1]) + + padding = [kernel_size // 2, kernel_size // 2, kernel_size // 2, kernel_size // 2] + + img = F.pad(img, padding, mode="reflect") + img = F.conv2d(img, kernel2d, groups=img.shape[-3]) + return img + +class SelfAttentionGuidance: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "scale": ("FLOAT", {"default": 0.5, "min": -2.0, "max": 5.0, "step": 0.1}), + "blur_sigma": ("FLOAT", {"default": 2.0, "min": 0.0, "max": 10.0, "step": 0.1}), + }} + RETURN_TYPES = 
("MODEL",) + FUNCTION = "patch" + + CATEGORY = "_for_testing" + + def patch(self, model, scale, blur_sigma): + m = model.clone() + + attn_scores = None + + # TODO: make this work properly with chunked batches + # currently, we can only save the attn from one UNet call + def attn_and_record(q, k, v, extra_options): + nonlocal attn_scores + # if uncond, save the attention scores + heads = extra_options["n_heads"] + cond_or_uncond = extra_options["cond_or_uncond"] + b = q.shape[0] // len(cond_or_uncond) + if 1 in cond_or_uncond: + uncond_index = cond_or_uncond.index(1) + # do the entire attention operation, but save the attention scores to attn_scores + (out, sim) = attention_basic_with_sim(q, k, v, heads=heads) + # when using a higher batch size, I BELIEVE the result batch dimension is [uc1, ... ucn, c1, ... cn] + n_slices = heads * b + attn_scores = sim[n_slices * uncond_index:n_slices * (uncond_index+1)] + return out + else: + return optimized_attention(q, k, v, heads=heads) + + def post_cfg_function(args): + nonlocal attn_scores + uncond_attn = attn_scores + + sag_scale = scale + sag_sigma = blur_sigma + sag_threshold = 1.0 + model = args["model"] + uncond_pred = args["uncond_denoised"] + uncond = args["uncond"] + cfg_result = args["denoised"] + sigma = args["sigma"] + model_options = args["model_options"] + x = args["input"] + if min(cfg_result.shape[2:]) <= 4: #skip when too small to add padding + return cfg_result + + # create the adversarially blurred image + degraded = create_blur_map(uncond_pred, uncond_attn, sag_sigma, sag_threshold) + degraded_noised = degraded + x - uncond_pred + # call into the UNet + (sag, _) = ldm_patched.modules.samplers.calc_cond_uncond_batch(model, uncond, None, degraded_noised, sigma, model_options) + return cfg_result + (degraded - sag) * sag_scale + + m.set_model_sampler_post_cfg_function(post_cfg_function, disable_cfg1_optimization=True) + + # from diffusers: + # unet.mid_block.attentions[0].transformer_blocks[0].attn1.patch + m.set_model_attn1_replace(attn_and_record, "middle", 0, 0) + + return (m, ) + +NODE_CLASS_MAPPINGS = { + "SelfAttentionGuidance": SelfAttentionGuidance, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "SelfAttentionGuidance": "Self-Attention Guidance", +} diff --git a/ldm_patched/contrib/external_sdupscale.py b/ldm_patched/contrib/external_sdupscale.py new file mode 100644 index 0000000000000000000000000000000000000000..68153c478bf3b216235ce6a0241c2e1d84eb46af --- /dev/null +++ b/ldm_patched/contrib/external_sdupscale.py @@ -0,0 +1,49 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import torch +import ldm_patched.contrib.external +import ldm_patched.modules.utils + +class SD_4XUpscale_Conditioning: + @classmethod + def INPUT_TYPES(s): + return {"required": { "images": ("IMAGE",), + "positive": ("CONDITIONING",), + "negative": ("CONDITIONING",), + "scale_ratio": ("FLOAT", {"default": 4.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "noise_augmentation": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + }} + RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT") + RETURN_NAMES = ("positive", "negative", "latent") + + FUNCTION = "encode" + + CATEGORY = "conditioning/upscale_diffusion" + + def encode(self, images, positive, negative, scale_ratio, noise_augmentation): + width = max(1, round(images.shape[-2] * scale_ratio)) + height = max(1, round(images.shape[-3] * scale_ratio)) + + pixels = ldm_patched.modules.utils.common_upscale((images.movedim(-1,1) * 2.0) - 1.0, width // 4, height // 4, 
"bilinear", "center") + + out_cp = [] + out_cn = [] + + for t in positive: + n = [t[0], t[1].copy()] + n[1]['concat_image'] = pixels + n[1]['noise_augmentation'] = noise_augmentation + out_cp.append(n) + + for t in negative: + n = [t[0], t[1].copy()] + n[1]['concat_image'] = pixels + n[1]['noise_augmentation'] = noise_augmentation + out_cn.append(n) + + latent = torch.zeros([images.shape[0], 4, height // 4, width // 4]) + return (out_cp, out_cn, {"samples":latent}) + +NODE_CLASS_MAPPINGS = { + "SD_4XUpscale_Conditioning": SD_4XUpscale_Conditioning, +} diff --git a/ldm_patched/contrib/external_stable3d.py b/ldm_patched/contrib/external_stable3d.py new file mode 100644 index 0000000000000000000000000000000000000000..bae2623fada0efa2d70f93af9bf475e8a619ebb4 --- /dev/null +++ b/ldm_patched/contrib/external_stable3d.py @@ -0,0 +1,104 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import torch +import ldm_patched.contrib.external +import ldm_patched.modules.utils + +def camera_embeddings(elevation, azimuth): + elevation = torch.as_tensor([elevation]) + azimuth = torch.as_tensor([azimuth]) + embeddings = torch.stack( + [ + torch.deg2rad( + (90 - elevation) - (90) + ), # Zero123 polar is 90-elevation + torch.sin(torch.deg2rad(azimuth)), + torch.cos(torch.deg2rad(azimuth)), + torch.deg2rad( + 90 - torch.full_like(elevation, 0) + ), + ], dim=-1).unsqueeze(1) + + return embeddings + + +class StableZero123_Conditioning: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip_vision": ("CLIP_VISION",), + "init_image": ("IMAGE",), + "vae": ("VAE",), + "width": ("INT", {"default": 256, "min": 16, "max": ldm_patched.contrib.external.MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 256, "min": 16, "max": ldm_patched.contrib.external.MAX_RESOLUTION, "step": 8}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), + "elevation": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0}), + "azimuth": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0}), + }} + RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT") + RETURN_NAMES = ("positive", "negative", "latent") + + FUNCTION = "encode" + + CATEGORY = "conditioning/3d_models" + + def encode(self, clip_vision, init_image, vae, width, height, batch_size, elevation, azimuth): + output = clip_vision.encode_image(init_image) + pooled = output.image_embeds.unsqueeze(0) + pixels = ldm_patched.modules.utils.common_upscale(init_image.movedim(-1,1), width, height, "bilinear", "center").movedim(1,-1) + encode_pixels = pixels[:,:,:,:3] + t = vae.encode(encode_pixels) + cam_embeds = camera_embeddings(elevation, azimuth) + cond = torch.cat([pooled, cam_embeds.to(pooled.device).repeat((pooled.shape[0], 1, 1))], dim=-1) + + positive = [[cond, {"concat_latent_image": t}]] + negative = [[torch.zeros_like(pooled), {"concat_latent_image": torch.zeros_like(t)}]] + latent = torch.zeros([batch_size, 4, height // 8, width // 8]) + return (positive, negative, {"samples":latent}) + +class StableZero123_Conditioning_Batched: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip_vision": ("CLIP_VISION",), + "init_image": ("IMAGE",), + "vae": ("VAE",), + "width": ("INT", {"default": 256, "min": 16, "max": ldm_patched.contrib.external.MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 256, "min": 16, "max": ldm_patched.contrib.external.MAX_RESOLUTION, "step": 8}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), + "elevation": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0}), 
+                              "azimuth": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0}),
+                              "elevation_batch_increment": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0}),
+                              "azimuth_batch_increment": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0}),
+                             }}
+    RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT")
+    RETURN_NAMES = ("positive", "negative", "latent")
+
+    FUNCTION = "encode"
+
+    CATEGORY = "conditioning/3d_models"
+
+    def encode(self, clip_vision, init_image, vae, width, height, batch_size, elevation, azimuth, elevation_batch_increment, azimuth_batch_increment):
+        output = clip_vision.encode_image(init_image)
+        pooled = output.image_embeds.unsqueeze(0)
+        pixels = ldm_patched.modules.utils.common_upscale(init_image.movedim(-1,1), width, height, "bilinear", "center").movedim(1,-1)
+        encode_pixels = pixels[:,:,:,:3]
+        t = vae.encode(encode_pixels)
+
+        cam_embeds = []
+        for i in range(batch_size):
+            cam_embeds.append(camera_embeddings(elevation, azimuth))
+            elevation += elevation_batch_increment
+            azimuth += azimuth_batch_increment
+
+        cam_embeds = torch.cat(cam_embeds, dim=0)
+        cond = torch.cat([ldm_patched.modules.utils.repeat_to_batch_size(pooled, batch_size), cam_embeds], dim=-1)
+
+        positive = [[cond, {"concat_latent_image": t}]]
+        negative = [[torch.zeros_like(pooled), {"concat_latent_image": torch.zeros_like(t)}]]
+        latent = torch.zeros([batch_size, 4, height // 8, width // 8])
+        return (positive, negative, {"samples":latent, "batch_index": [0] * batch_size})
+
+
+NODE_CLASS_MAPPINGS = {
+    "StableZero123_Conditioning": StableZero123_Conditioning,
+    "StableZero123_Conditioning_Batched": StableZero123_Conditioning_Batched,
+}
diff --git a/ldm_patched/contrib/external_tomesd.py b/ldm_patched/contrib/external_tomesd.py
new file mode 100644
index 0000000000000000000000000000000000000000..b01d6910f09c6ba1c7cfe3fe255acb9c4ea3545b
--- /dev/null
+++ b/ldm_patched/contrib/external_tomesd.py
@@ -0,0 +1,179 @@
+# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py
+
+#Taken from: https://github.com/dbolya/tomesd
+
+import torch
+from typing import Tuple, Callable
+import math
+
+def do_nothing(x: torch.Tensor, mode:str=None):
+    return x
+
+
+def mps_gather_workaround(input, dim, index):
+    if input.shape[-1] == 1:
+        return torch.gather(
+            input.unsqueeze(-1),
+            dim - 1 if dim < 0 else dim,
+            index.unsqueeze(-1)
+        ).squeeze(-1)
+    else:
+        return torch.gather(input, dim, index)
+
+
+def bipartite_soft_matching_random2d(metric: torch.Tensor,
+                                     w: int, h: int, sx: int, sy: int, r: int,
+                                     no_rand: bool = False) -> Tuple[Callable, Callable]:
+    """
+    Partitions the tokens into src and dst and merges r tokens from src to dst.
+    Dst tokens are partitioned by choosing one randomly in each (sx, sy) region.
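+
+    Returns a (merge, unmerge) pair: merge(x) folds the r most similar src
+    tokens into their matched dst tokens (mean-reduced by default), shrinking
+    dim 1 from N to N - r, and unmerge(x) scatters the merged values back to
+    the original N token positions.
+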
+    Args:
+     - metric [B, N, C]: metric to use for similarity
+     - w: image width in tokens
+     - h: image height in tokens
+     - sx: stride in the x dimension for dst, must divide w
+     - sy: stride in the y dimension for dst, must divide h
+     - r: number of tokens to remove (by merging)
+     - no_rand: if true, disable randomness (use top left corner only)
+    """
+    B, N, _ = metric.shape
+
+    if r <= 0 or w == 1 or h == 1:
+        return do_nothing, do_nothing
+
+    gather = mps_gather_workaround if metric.device.type == "mps" else torch.gather
+
+    with torch.no_grad():
+
+        hsy, wsx = h // sy, w // sx
+
+        # For each sy by sx kernel, randomly assign one token to be dst and the rest src
+        if no_rand:
+            rand_idx = torch.zeros(hsy, wsx, 1, device=metric.device, dtype=torch.int64)
+        else:
+            rand_idx = torch.randint(sy*sx, size=(hsy, wsx, 1), device=metric.device)
+
+        # The image might not be evenly divisible by sx and sy, so we work on a view of the top left of the idx buffer instead
+        idx_buffer_view = torch.zeros(hsy, wsx, sy*sx, device=metric.device, dtype=torch.int64)
+        idx_buffer_view.scatter_(dim=2, index=rand_idx, src=-torch.ones_like(rand_idx, dtype=rand_idx.dtype))
+        idx_buffer_view = idx_buffer_view.view(hsy, wsx, sy, sx).transpose(1, 2).reshape(hsy * sy, wsx * sx)
+
+        # Image is not divisible by sx or sy, so we need to move the view into a new buffer
+        if (hsy * sy) < h or (wsx * sx) < w:
+            idx_buffer = torch.zeros(h, w, device=metric.device, dtype=torch.int64)
+            idx_buffer[:(hsy * sy), :(wsx * sx)] = idx_buffer_view
+        else:
+            idx_buffer = idx_buffer_view
+
+        # We set dst tokens to be -1 and src to be 0, so an argsort gives us dst|src indices
+        rand_idx = idx_buffer.reshape(1, -1, 1).argsort(dim=1)
+
+        # We're finished with these
+        del idx_buffer, idx_buffer_view
+
+        # rand_idx is currently dst|src, so split them
+        num_dst = hsy * wsx
+        a_idx = rand_idx[:, num_dst:, :]  # src
+        b_idx = rand_idx[:, :num_dst, :]  # dst
+
+        def split(x):
+            C = x.shape[-1]
+            src = gather(x, dim=1, index=a_idx.expand(B, N - num_dst, C))
+            dst = gather(x, dim=1, index=b_idx.expand(B, num_dst, C))
+            return src, dst
+
+        # Cosine similarity between A and B
+        metric = metric / metric.norm(dim=-1, keepdim=True)
+        a, b = split(metric)
+        scores = a @ b.transpose(-1, -2)
+
+        # Can't reduce more than the # tokens in src
+        r = min(a.shape[1], r)
+
+        # Find the most similar greedily
+        node_max, node_idx = scores.max(dim=-1)
+        edge_idx = node_max.argsort(dim=-1, descending=True)[..., None]
+
+        unm_idx = edge_idx[..., r:, :]  # Unmerged Tokens
+        src_idx = edge_idx[..., :r, :]  # Merged Tokens
+        dst_idx = gather(node_idx[..., None], dim=-2, index=src_idx)
+
+    def merge(x: torch.Tensor, mode="mean") -> torch.Tensor:
+        src, dst = split(x)
+        n, t1, c = src.shape
+
+        unm = gather(src, dim=-2, index=unm_idx.expand(n, t1 - r, c))
+        src = gather(src, dim=-2, index=src_idx.expand(n, r, c))
+        dst = dst.scatter_reduce(-2, dst_idx.expand(n, r, c), src, reduce=mode)
+
+        return torch.cat([unm, dst], dim=1)
+
+    def unmerge(x: torch.Tensor) -> torch.Tensor:
+        unm_len = unm_idx.shape[1]
+        unm, dst = x[..., :unm_len, :], x[..., unm_len:, :]
+        _, _, c = unm.shape
+
+        src = gather(dst, dim=-2, index=dst_idx.expand(B, r, c))
+
+        # Combine back to the original shape
+        out = torch.zeros(B, N, c, device=x.device, dtype=x.dtype)
+        out.scatter_(dim=-2, index=b_idx.expand(B, num_dst, c), src=dst)
+        out.scatter_(dim=-2, index=gather(a_idx.expand(B, a_idx.shape[1], 1), dim=1, index=unm_idx).expand(B, unm_len, c), src=unm)
+        out.scatter_(dim=-2, index=gather(a_idx.expand(B, 
a_idx.shape[1], 1), dim=1, index=src_idx).expand(B, r, c), src=src) + + return out + + return merge, unmerge + + +def get_functions(x, ratio, original_shape): + b, c, original_h, original_w = original_shape + original_tokens = original_h * original_w + downsample = int(math.ceil(math.sqrt(original_tokens // x.shape[1]))) + stride_x = 2 + stride_y = 2 + max_downsample = 1 + + if downsample <= max_downsample: + w = int(math.ceil(original_w / downsample)) + h = int(math.ceil(original_h / downsample)) + r = int(x.shape[1] * ratio) + no_rand = False + m, u = bipartite_soft_matching_random2d(x, w, h, stride_x, stride_y, r, no_rand) + return m, u + + nothing = lambda y: y + return nothing, nothing + + + +class TomePatchModel: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "ratio": ("FLOAT", {"default": 0.3, "min": 0.0, "max": 1.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "_for_testing" + + def patch(self, model, ratio): + self.u = None + def tomesd_m(q, k, v, extra_options): + #NOTE: In the reference code get_functions takes x (input of the transformer block) as the argument instead of q + #however from my basic testing it seems that using q instead gives better results + m, self.u = get_functions(q, ratio, extra_options["original_shape"]) + return m(q), k, v + def tomesd_u(n, extra_options): + return self.u(n) + + m = model.clone() + m.set_model_attn1_patch(tomesd_m) + m.set_model_attn1_output_patch(tomesd_u) + return (m, ) + + +NODE_CLASS_MAPPINGS = { + "TomePatchModel": TomePatchModel, +} diff --git a/ldm_patched/contrib/external_upscale_model.py b/ldm_patched/contrib/external_upscale_model.py new file mode 100644 index 0000000000000000000000000000000000000000..31d102f0e884af374d19eadbf6cda5eb928926d9 --- /dev/null +++ b/ldm_patched/contrib/external_upscale_model.py @@ -0,0 +1,68 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import os +from ldm_patched.pfn import model_loading +from ldm_patched.modules import model_management +import torch +import ldm_patched.modules.utils +import ldm_patched.utils.path_utils + +class UpscaleModelLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model_name": (ldm_patched.utils.path_utils.get_filename_list("upscale_models"), ), + }} + RETURN_TYPES = ("UPSCALE_MODEL",) + FUNCTION = "load_model" + + CATEGORY = "loaders" + + def load_model(self, model_name): + model_path = ldm_patched.utils.path_utils.get_full_path("upscale_models", model_name) + sd = ldm_patched.modules.utils.load_torch_file(model_path, safe_load=True) + if "module.layers.0.residual_group.blocks.0.norm1.weight" in sd: + sd = ldm_patched.modules.utils.state_dict_prefix_replace(sd, {"module.":""}) + out = model_loading.load_state_dict(sd).eval() + return (out, ) + + +class ImageUpscaleWithModel: + @classmethod + def INPUT_TYPES(s): + return {"required": { "upscale_model": ("UPSCALE_MODEL",), + "image": ("IMAGE",), + }} + RETURN_TYPES = ("IMAGE",) + FUNCTION = "upscale" + + CATEGORY = "image/upscaling" + + def upscale(self, upscale_model, image): + device = model_management.get_torch_device() + upscale_model.to(device) + in_img = image.movedim(-1,-3).to(device) + free_memory = model_management.get_free_memory(device) + + tile = 512 + overlap = 32 + + oom = True + while oom: + try: + steps = in_img.shape[0] * ldm_patched.modules.utils.get_tiled_scale_steps(in_img.shape[3], in_img.shape[2], tile_x=tile, tile_y=tile, overlap=overlap) + pbar = 
ldm_patched.modules.utils.ProgressBar(steps) + s = ldm_patched.modules.utils.tiled_scale(in_img, lambda a: upscale_model(a), tile_x=tile, tile_y=tile, overlap=overlap, upscale_amount=upscale_model.scale, pbar=pbar) + oom = False + except model_management.OOM_EXCEPTION as e: + tile //= 2 + if tile < 128: + raise e + + upscale_model.cpu() + s = torch.clamp(s.movedim(-3,-1), min=0, max=1.0) + return (s,) + +NODE_CLASS_MAPPINGS = { + "UpscaleModelLoader": UpscaleModelLoader, + "ImageUpscaleWithModel": ImageUpscaleWithModel +} diff --git a/ldm_patched/contrib/external_video_model.py b/ldm_patched/contrib/external_video_model.py new file mode 100644 index 0000000000000000000000000000000000000000..503df0e181b445df5bd7879d9ffe4b251f3a43fe --- /dev/null +++ b/ldm_patched/contrib/external_video_model.py @@ -0,0 +1,108 @@ +# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py + +import ldm_patched.contrib.external +import torch +import ldm_patched.modules.utils +import ldm_patched.modules.sd +import ldm_patched.utils.path_utils +import ldm_patched.contrib.external_model_merging + + +class ImageOnlyCheckpointLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "ckpt_name": (ldm_patched.utils.path_utils.get_filename_list("checkpoints"), ), + }} + RETURN_TYPES = ("MODEL", "CLIP_VISION", "VAE") + FUNCTION = "load_checkpoint" + + CATEGORY = "loaders/video_models" + + def load_checkpoint(self, ckpt_name, output_vae=True, output_clip=True): + ckpt_path = ldm_patched.utils.path_utils.get_full_path("checkpoints", ckpt_name) + out = ldm_patched.modules.sd.load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=False, output_clipvision=True, embedding_directory=ldm_patched.utils.path_utils.get_folder_paths("embeddings")) + return (out[0], out[3], out[2]) + + +class SVD_img2vid_Conditioning: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip_vision": ("CLIP_VISION",), + "init_image": ("IMAGE",), + "vae": ("VAE",), + "width": ("INT", {"default": 1024, "min": 16, "max": ldm_patched.contrib.external.MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 576, "min": 16, "max": ldm_patched.contrib.external.MAX_RESOLUTION, "step": 8}), + "video_frames": ("INT", {"default": 14, "min": 1, "max": 4096}), + "motion_bucket_id": ("INT", {"default": 127, "min": 1, "max": 1023}), + "fps": ("INT", {"default": 6, "min": 1, "max": 1024}), + "augmentation_level": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 10.0, "step": 0.01}) + }} + RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT") + RETURN_NAMES = ("positive", "negative", "latent") + + FUNCTION = "encode" + + CATEGORY = "conditioning/video_models" + + def encode(self, clip_vision, init_image, vae, width, height, video_frames, motion_bucket_id, fps, augmentation_level): + output = clip_vision.encode_image(init_image) + pooled = output.image_embeds.unsqueeze(0) + pixels = ldm_patched.modules.utils.common_upscale(init_image.movedim(-1,1), width, height, "bilinear", "center").movedim(1,-1) + encode_pixels = pixels[:,:,:,:3] + if augmentation_level > 0: + encode_pixels += torch.randn_like(pixels) * augmentation_level + t = vae.encode(encode_pixels) + positive = [[pooled, {"motion_bucket_id": motion_bucket_id, "fps": fps, "augmentation_level": augmentation_level, "concat_latent_image": t}]] + negative = [[torch.zeros_like(pooled), {"motion_bucket_id": motion_bucket_id, "fps": fps, "augmentation_level": augmentation_level, "concat_latent_image": torch.zeros_like(t)}]] + latent = torch.zeros([video_frames, 4, 
height // 8, width // 8]) + return (positive, negative, {"samples":latent}) + +class VideoLinearCFGGuidance: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "min_cfg": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.5, "round": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "sampling/video_models" + + def patch(self, model, min_cfg): + def linear_cfg(args): + cond = args["cond"] + uncond = args["uncond"] + cond_scale = args["cond_scale"] + + scale = torch.linspace(min_cfg, cond_scale, cond.shape[0], device=cond.device).reshape((cond.shape[0], 1, 1, 1)) + return uncond + scale * (cond - uncond) + + m = model.clone() + m.set_model_sampler_cfg_function(linear_cfg) + return (m, ) + +class ImageOnlyCheckpointSave(ldm_patched.contrib.external_model_merging.CheckpointSave): + CATEGORY = "_for_testing" + + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "clip_vision": ("CLIP_VISION",), + "vae": ("VAE",), + "filename_prefix": ("STRING", {"default": "checkpoints/ldm_patched"}),}, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},} + + def save(self, model, clip_vision, vae, filename_prefix, prompt=None, extra_pnginfo=None): + ldm_patched.contrib.external_model_merging.save_checkpoint(model, clip_vision=clip_vision, vae=vae, filename_prefix=filename_prefix, output_dir=self.output_dir, prompt=prompt, extra_pnginfo=extra_pnginfo) + return {} + +NODE_CLASS_MAPPINGS = { + "ImageOnlyCheckpointLoader": ImageOnlyCheckpointLoader, + "SVD_img2vid_Conditioning": SVD_img2vid_Conditioning, + "VideoLinearCFGGuidance": VideoLinearCFGGuidance, + "ImageOnlyCheckpointSave": ImageOnlyCheckpointSave, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "ImageOnlyCheckpointLoader": "Image Only Checkpoint Loader (img2vid model)", +} diff --git a/ldm_patched/controlnet/__pycache__/cldm.cpython-310.pyc b/ldm_patched/controlnet/__pycache__/cldm.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4a4eba376419b0244a78a6b76f6884b3a4976e27 Binary files /dev/null and b/ldm_patched/controlnet/__pycache__/cldm.cpython-310.pyc differ diff --git a/ldm_patched/controlnet/cldm.py b/ldm_patched/controlnet/cldm.py new file mode 100644 index 0000000000000000000000000000000000000000..82265ef955f6384895f0b530870195da7866fdbc --- /dev/null +++ b/ldm_patched/controlnet/cldm.py @@ -0,0 +1,312 @@ +#taken from: https://github.com/lllyasviel/ControlNet +#and modified + +import torch +import torch as th +import torch.nn as nn + +from ldm_patched.ldm.modules.diffusionmodules.util import ( + zero_module, + timestep_embedding, +) + +from ldm_patched.ldm.modules.attention import SpatialTransformer +from ldm_patched.ldm.modules.diffusionmodules.openaimodel import UNetModel, TimestepEmbedSequential, ResBlock, Downsample +from ldm_patched.ldm.util import exists +import ldm_patched.modules.ops + +class ControlledUnetModel(UNetModel): + #implemented in the ldm unet + pass + +class ControlNet(nn.Module): + def __init__( + self, + image_size, + in_channels, + model_channels, + hint_channels, + num_res_blocks, + dropout=0, + channel_mult=(1, 2, 4, 8), + conv_resample=True, + dims=2, + num_classes=None, + use_checkpoint=False, + dtype=torch.float32, + num_heads=-1, + num_head_channels=-1, + num_heads_upsample=-1, + use_scale_shift_norm=False, + resblock_updown=False, + use_new_attention_order=False, + use_spatial_transformer=False, # custom transformer support + transformer_depth=1, # custom transformer support 
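+        # transformer_depth is consumed one entry per ResBlock via
+        # transformer_depth.pop(0) in the constructor body below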
+ context_dim=None, # custom transformer support + n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model + legacy=True, + disable_self_attentions=None, + num_attention_blocks=None, + disable_middle_self_attn=False, + use_linear_in_transformer=False, + adm_in_channels=None, + transformer_depth_middle=None, + transformer_depth_output=None, + device=None, + operations=ldm_patched.modules.ops.disable_weight_init, + **kwargs, + ): + super().__init__() + assert use_spatial_transformer == True, "use_spatial_transformer has to be true" + if use_spatial_transformer: + assert context_dim is not None, 'Fool!! You forgot to include the dimension of your cross-attention conditioning...' + + if context_dim is not None: + assert use_spatial_transformer, 'Fool!! You forgot to use the spatial transformer for your cross-attention conditioning...' + # from omegaconf.listconfig import ListConfig + # if type(context_dim) == ListConfig: + # context_dim = list(context_dim) + + if num_heads_upsample == -1: + num_heads_upsample = num_heads + + if num_heads == -1: + assert num_head_channels != -1, 'Either num_heads or num_head_channels has to be set' + + if num_head_channels == -1: + assert num_heads != -1, 'Either num_heads or num_head_channels has to be set' + + self.dims = dims + self.image_size = image_size + self.in_channels = in_channels + self.model_channels = model_channels + + if isinstance(num_res_blocks, int): + self.num_res_blocks = len(channel_mult) * [num_res_blocks] + else: + if len(num_res_blocks) != len(channel_mult): + raise ValueError("provide num_res_blocks either as an int (globally constant) or " + "as a list/tuple (per-level) with the same length as channel_mult") + self.num_res_blocks = num_res_blocks + + if disable_self_attentions is not None: + # should be a list of booleans, indicating whether to disable self-attention in TransformerBlocks or not + assert len(disable_self_attentions) == len(channel_mult) + if num_attention_blocks is not None: + assert len(num_attention_blocks) == len(self.num_res_blocks) + assert all(map(lambda i: self.num_res_blocks[i] >= num_attention_blocks[i], range(len(num_attention_blocks)))) + + transformer_depth = transformer_depth[:] + + self.dropout = dropout + self.channel_mult = channel_mult + self.conv_resample = conv_resample + self.num_classes = num_classes + self.use_checkpoint = use_checkpoint + self.dtype = dtype + self.num_heads = num_heads + self.num_head_channels = num_head_channels + self.num_heads_upsample = num_heads_upsample + self.predict_codebook_ids = n_embed is not None + + time_embed_dim = model_channels * 4 + self.time_embed = nn.Sequential( + operations.Linear(model_channels, time_embed_dim, dtype=self.dtype, device=device), + nn.SiLU(), + operations.Linear(time_embed_dim, time_embed_dim, dtype=self.dtype, device=device), + ) + + if self.num_classes is not None: + if isinstance(self.num_classes, int): + self.label_emb = nn.Embedding(num_classes, time_embed_dim) + elif self.num_classes == "continuous": + print("setting up linear c_adm embedding layer") + self.label_emb = nn.Linear(1, time_embed_dim) + elif self.num_classes == "sequential": + assert adm_in_channels is not None + self.label_emb = nn.Sequential( + nn.Sequential( + operations.Linear(adm_in_channels, time_embed_dim, dtype=self.dtype, device=device), + nn.SiLU(), + operations.Linear(time_embed_dim, time_embed_dim, dtype=self.dtype, device=device), + ) + ) + else: + raise ValueError() + + self.input_blocks = nn.ModuleList( + [ + 
TimestepEmbedSequential( + operations.conv_nd(dims, in_channels, model_channels, 3, padding=1, dtype=self.dtype, device=device) + ) + ] + ) + self.zero_convs = nn.ModuleList([self.make_zero_conv(model_channels, operations=operations, dtype=self.dtype, device=device)]) + + self.input_hint_block = TimestepEmbedSequential( + operations.conv_nd(dims, hint_channels, 16, 3, padding=1, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 16, 16, 3, padding=1, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 16, 32, 3, padding=1, stride=2, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 32, 32, 3, padding=1, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 32, 96, 3, padding=1, stride=2, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 96, 96, 3, padding=1, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 96, 256, 3, padding=1, stride=2, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 256, model_channels, 3, padding=1, dtype=self.dtype, device=device) + ) + + self._feature_size = model_channels + input_block_chans = [model_channels] + ch = model_channels + ds = 1 + for level, mult in enumerate(channel_mult): + for nr in range(self.num_res_blocks[level]): + layers = [ + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=mult * model_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations, + ) + ] + ch = mult * model_channels + num_transformers = transformer_depth.pop(0) + if num_transformers > 0: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + #num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + if exists(disable_self_attentions): + disabled_sa = disable_self_attentions[level] + else: + disabled_sa = False + + if not exists(num_attention_blocks) or nr < num_attention_blocks[level]: + layers.append( + SpatialTransformer( + ch, num_heads, dim_head, depth=num_transformers, context_dim=context_dim, + disable_self_attn=disabled_sa, use_linear=use_linear_in_transformer, + use_checkpoint=use_checkpoint, dtype=self.dtype, device=device, operations=operations + ) + ) + self.input_blocks.append(TimestepEmbedSequential(*layers)) + self.zero_convs.append(self.make_zero_conv(ch, operations=operations, dtype=self.dtype, device=device)) + self._feature_size += ch + input_block_chans.append(ch) + if level != len(channel_mult) - 1: + out_ch = ch + self.input_blocks.append( + TimestepEmbedSequential( + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=True, + dtype=self.dtype, + device=device, + operations=operations + ) + if resblock_updown + else Downsample( + ch, conv_resample, dims=dims, out_channels=out_ch, dtype=self.dtype, device=device, operations=operations + ) + ) + ) + ch = out_ch + input_block_chans.append(ch) + self.zero_convs.append(self.make_zero_conv(ch, operations=operations, dtype=self.dtype, device=device)) + ds *= 2 + self._feature_size += ch + + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + #num_heads = 1 + dim_head = ch // num_heads if 
use_spatial_transformer else num_head_channels + mid_block = [ + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations + )] + if transformer_depth_middle >= 0: + mid_block += [SpatialTransformer( # always uses a self-attn + ch, num_heads, dim_head, depth=transformer_depth_middle, context_dim=context_dim, + disable_self_attn=disable_middle_self_attn, use_linear=use_linear_in_transformer, + use_checkpoint=use_checkpoint, dtype=self.dtype, device=device, operations=operations + ), + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations + )] + self.middle_block = TimestepEmbedSequential(*mid_block) + self.middle_block_out = self.make_zero_conv(ch, operations=operations, dtype=self.dtype, device=device) + self._feature_size += ch + + def make_zero_conv(self, channels, operations=None, dtype=None, device=None): + return TimestepEmbedSequential(operations.conv_nd(self.dims, channels, channels, 1, padding=0, dtype=dtype, device=device)) + + def forward(self, x, hint, timesteps, context, y=None, **kwargs): + t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False).to(x.dtype) + emb = self.time_embed(t_emb) + + guided_hint = self.input_hint_block(hint, emb, context) + + outs = [] + + hs = [] + if self.num_classes is not None: + assert y.shape[0] == x.shape[0] + emb = emb + self.label_emb(y) + + h = x + for module, zero_conv in zip(self.input_blocks, self.zero_convs): + if guided_hint is not None: + h = module(h, emb, context) + h += guided_hint + guided_hint = None + else: + h = module(h, emb, context) + outs.append(zero_conv(h, emb, context)) + + h = self.middle_block(h, emb, context) + outs.append(self.middle_block_out(h, emb, context)) + + return outs + diff --git a/ldm_patched/k_diffusion/__pycache__/sampling.cpython-310.pyc b/ldm_patched/k_diffusion/__pycache__/sampling.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..800b85eab7703f20c573f2ed9b5c47e64028603d Binary files /dev/null and b/ldm_patched/k_diffusion/__pycache__/sampling.cpython-310.pyc differ diff --git a/ldm_patched/k_diffusion/__pycache__/utils.cpython-310.pyc b/ldm_patched/k_diffusion/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4b98fe647da519edbec3e9e72df6daf5e0d62020 Binary files /dev/null and b/ldm_patched/k_diffusion/__pycache__/utils.cpython-310.pyc differ diff --git a/ldm_patched/k_diffusion/sampling.py b/ldm_patched/k_diffusion/sampling.py new file mode 100644 index 0000000000000000000000000000000000000000..761c2e0ef7cb66e2b2f918f7477bd5ca1801ea88 --- /dev/null +++ b/ldm_patched/k_diffusion/sampling.py @@ -0,0 +1,810 @@ +import math + +from scipy import integrate +import torch +from torch import nn +import torchsde +from tqdm.auto import trange, tqdm + +from . import utils + + +def append_zero(x): + return torch.cat([x, x.new_zeros([1])]) + + +def get_sigmas_karras(n, sigma_min, sigma_max, rho=7., device='cpu'): + """Constructs the noise schedule of Karras et al. 
(2022)."""
+    ramp = torch.linspace(0, 1, n, device=device)
+    min_inv_rho = sigma_min ** (1 / rho)
+    max_inv_rho = sigma_max ** (1 / rho)
+    sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho
+    return append_zero(sigmas).to(device)
+
+
+def get_sigmas_exponential(n, sigma_min, sigma_max, device='cpu'):
+    """Constructs an exponential noise schedule."""
+    sigmas = torch.linspace(math.log(sigma_max), math.log(sigma_min), n, device=device).exp()
+    return append_zero(sigmas)
+
+
+def get_sigmas_polyexponential(n, sigma_min, sigma_max, rho=1., device='cpu'):
+    """Constructs a polynomial in log sigma noise schedule."""
+    ramp = torch.linspace(1, 0, n, device=device) ** rho
+    sigmas = torch.exp(ramp * (math.log(sigma_max) - math.log(sigma_min)) + math.log(sigma_min))
+    return append_zero(sigmas)
+
+
+def get_sigmas_vp(n, beta_d=19.9, beta_min=0.1, eps_s=1e-3, device='cpu'):
+    """Constructs a continuous VP noise schedule."""
+    t = torch.linspace(1, eps_s, n, device=device)
+    sigmas = torch.sqrt(torch.exp(beta_d * t ** 2 / 2 + beta_min * t) - 1)
+    return append_zero(sigmas)
+
+
+def to_d(x, sigma, denoised):
+    """Converts a denoiser output to a Karras ODE derivative."""
+    return (x - denoised) / utils.append_dims(sigma, x.ndim)
+
+
+def get_ancestral_step(sigma_from, sigma_to, eta=1.):
+    """Calculates the noise level (sigma_down) to step down to and the amount
+    of noise to add (sigma_up) when doing an ancestral sampling step."""
+    if not eta:
+        return sigma_to, 0.
+    sigma_up = min(sigma_to, eta * (sigma_to ** 2 * (sigma_from ** 2 - sigma_to ** 2) / sigma_from ** 2) ** 0.5)
+    sigma_down = (sigma_to ** 2 - sigma_up ** 2) ** 0.5
+    return sigma_down, sigma_up
+
+
+def default_noise_sampler(x):
+    return lambda sigma, sigma_next: torch.randn_like(x)
+
+
+class BatchedBrownianTree:
+    """A wrapper around torchsde.BrownianTree that enables batches of entropy."""
+
+    def __init__(self, x, t0, t1, seed=None, **kwargs):
+        self.cpu_tree = True
+        if "cpu" in kwargs:
+            self.cpu_tree = kwargs.pop("cpu")
+        t0, t1, self.sign = self.sort(t0, t1)
+        w0 = kwargs.get('w0', torch.zeros_like(x))
+        if seed is None:
+            seed = torch.randint(0, 2 ** 63 - 1, []).item()
+        self.batched = True
+        try:
+            assert len(seed) == x.shape[0]
+            w0 = w0[0]
+        except TypeError:
+            seed = [seed]
+            self.batched = False
+        if self.cpu_tree:
+            self.trees = [torchsde.BrownianTree(t0.cpu(), w0.cpu(), t1.cpu(), entropy=s, **kwargs) for s in seed]
+        else:
+            self.trees = [torchsde.BrownianTree(t0, w0, t1, entropy=s, **kwargs) for s in seed]
+
+    @staticmethod
+    def sort(a, b):
+        return (a, b, 1) if a < b else (b, a, -1)
+
+    def __call__(self, t0, t1):
+        t0, t1, sign = self.sort(t0, t1)
+        if self.cpu_tree:
+            w = torch.stack([tree(t0.cpu().float(), t1.cpu().float()).to(t0.dtype).to(t0.device) for tree in self.trees]) * (self.sign * sign)
+        else:
+            w = torch.stack([tree(t0, t1) for tree in self.trees]) * (self.sign * sign)
+
+        return w if self.batched else w[0]
+
+
+class BrownianTreeNoiseSampler:
+    """A noise sampler backed by a torchsde.BrownianTree.
+
+    Args:
+        x (Tensor): The tensor whose shape, device and dtype to use to generate
+            random samples.
+        sigma_min (float): The low end of the valid interval.
+        sigma_max (float): The high end of the valid interval.
+        seed (int or List[int]): The random seed. If a list of seeds is
+            supplied instead of a single integer, then the noise sampler will
+            use one BrownianTree per batch item, each with its own seed.
+        transform (callable): A function that maps sigma to the sampler's
+            internal timestep.
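+
+    Calling the instance with (sigma, sigma_next) returns unit-variance noise
+    for that interval: the Brownian increment divided by sqrt(|t1 - t0|);
+    callers scale it by their own step size (e.g. sigma_up).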
+ """ + + def __init__(self, x, sigma_min, sigma_max, seed=None, transform=lambda x: x, cpu=False): + self.transform = transform + t0, t1 = self.transform(torch.as_tensor(sigma_min)), self.transform(torch.as_tensor(sigma_max)) + self.tree = BatchedBrownianTree(x, t0, t1, seed, cpu=cpu) + + def __call__(self, sigma, sigma_next): + t0, t1 = self.transform(torch.as_tensor(sigma)), self.transform(torch.as_tensor(sigma_next)) + return self.tree(t0, t1) / (t1 - t0).abs().sqrt() + + +@torch.no_grad() +def sample_euler(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1.): + """Implements Algorithm 2 (Euler steps) from Karras et al. (2022).""" + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + for i in trange(len(sigmas) - 1, disable=disable): + gamma = min(s_churn / (len(sigmas) - 1), 2 ** 0.5 - 1) if s_tmin <= sigmas[i] <= s_tmax else 0. + sigma_hat = sigmas[i] * (gamma + 1) + if gamma > 0: + eps = torch.randn_like(x) * s_noise + x = x + eps * (sigma_hat ** 2 - sigmas[i] ** 2) ** 0.5 + denoised = model(x, sigma_hat * s_in, **extra_args) + d = to_d(x, sigma_hat, denoised) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised}) + dt = sigmas[i + 1] - sigma_hat + # Euler method + x = x + d * dt + return x + + +@torch.no_grad() +def sample_euler_ancestral(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None): + """Ancestral sampling with Euler method steps.""" + extra_args = {} if extra_args is None else extra_args + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + s_in = x.new_ones([x.shape[0]]) + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + d = to_d(x, sigmas[i], denoised) + # Euler method + dt = sigma_down - sigmas[i] + x = x + d * dt + if sigmas[i + 1] > 0: + x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * sigma_up + return x + + +@torch.no_grad() +def sample_heun(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1.): + """Implements Algorithm 2 (Heun steps) from Karras et al. (2022).""" + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + for i in trange(len(sigmas) - 1, disable=disable): + gamma = min(s_churn / (len(sigmas) - 1), 2 ** 0.5 - 1) if s_tmin <= sigmas[i] <= s_tmax else 0. 
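+        # "churn": gamma > 0 temporarily lifts the noise level to
+        # sigma_hat = sigma * (gamma + 1) and injects matching fresh noise
+        # below, per Algorithm 2 of Karras et al. (2022)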
+ sigma_hat = sigmas[i] * (gamma + 1) + if gamma > 0: + eps = torch.randn_like(x) * s_noise + x = x + eps * (sigma_hat ** 2 - sigmas[i] ** 2) ** 0.5 + denoised = model(x, sigma_hat * s_in, **extra_args) + d = to_d(x, sigma_hat, denoised) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised}) + dt = sigmas[i + 1] - sigma_hat + if sigmas[i + 1] == 0: + # Euler method + x = x + d * dt + else: + # Heun's method + x_2 = x + d * dt + denoised_2 = model(x_2, sigmas[i + 1] * s_in, **extra_args) + d_2 = to_d(x_2, sigmas[i + 1], denoised_2) + d_prime = (d + d_2) / 2 + x = x + d_prime * dt + return x + + +@torch.no_grad() +def sample_dpm_2(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1.): + """A sampler inspired by DPM-Solver-2 and Algorithm 2 from Karras et al. (2022).""" + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + for i in trange(len(sigmas) - 1, disable=disable): + gamma = min(s_churn / (len(sigmas) - 1), 2 ** 0.5 - 1) if s_tmin <= sigmas[i] <= s_tmax else 0. + sigma_hat = sigmas[i] * (gamma + 1) + if gamma > 0: + eps = torch.randn_like(x) * s_noise + x = x + eps * (sigma_hat ** 2 - sigmas[i] ** 2) ** 0.5 + denoised = model(x, sigma_hat * s_in, **extra_args) + d = to_d(x, sigma_hat, denoised) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised}) + if sigmas[i + 1] == 0: + # Euler method + dt = sigmas[i + 1] - sigma_hat + x = x + d * dt + else: + # DPM-Solver-2 + sigma_mid = sigma_hat.log().lerp(sigmas[i + 1].log(), 0.5).exp() + dt_1 = sigma_mid - sigma_hat + dt_2 = sigmas[i + 1] - sigma_hat + x_2 = x + d * dt_1 + denoised_2 = model(x_2, sigma_mid * s_in, **extra_args) + d_2 = to_d(x_2, sigma_mid, denoised_2) + x = x + d_2 * dt_2 + return x + + +@torch.no_grad() +def sample_dpm_2_ancestral(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None): + """Ancestral sampling with DPM-Solver second-order steps.""" + extra_args = {} if extra_args is None else extra_args + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + s_in = x.new_ones([x.shape[0]]) + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + d = to_d(x, sigmas[i], denoised) + if sigma_down == 0: + # Euler method + dt = sigma_down - sigmas[i] + x = x + d * dt + else: + # DPM-Solver-2 + sigma_mid = sigmas[i].log().lerp(sigma_down.log(), 0.5).exp() + dt_1 = sigma_mid - sigmas[i] + dt_2 = sigma_down - sigmas[i] + x_2 = x + d * dt_1 + denoised_2 = model(x_2, sigma_mid * s_in, **extra_args) + d_2 = to_d(x_2, sigma_mid, denoised_2) + x = x + d_2 * dt_2 + x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * sigma_up + return x + + +def linear_multistep_coeff(order, t, i, j): + if order - 1 > i: + raise ValueError(f'Order {order} too high for step {i}') + def fn(tau): + prod = 1. 
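+        # fn(tau) is the Lagrange basis polynomial that equals 1 at t[i - j] and
+        # 0 at the other recent sigmas; its integral over [t[i], t[i + 1]] is the
+        # weight applied to the j-th most recent derivative in sample_lms below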
+ for k in range(order): + if j == k: + continue + prod *= (tau - t[i - k]) / (t[i - j] - t[i - k]) + return prod + return integrate.quad(fn, t[i], t[i + 1], epsrel=1e-4)[0] + + +@torch.no_grad() +def sample_lms(model, x, sigmas, extra_args=None, callback=None, disable=None, order=4): + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + sigmas_cpu = sigmas.detach().cpu().numpy() + ds = [] + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + d = to_d(x, sigmas[i], denoised) + ds.append(d) + if len(ds) > order: + ds.pop(0) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + cur_order = min(i + 1, order) + coeffs = [linear_multistep_coeff(cur_order, sigmas_cpu, i, j) for j in range(cur_order)] + x = x + sum(coeff * d for coeff, d in zip(coeffs, reversed(ds))) + return x + + +class PIDStepSizeController: + """A PID controller for ODE adaptive step size control.""" + def __init__(self, h, pcoeff, icoeff, dcoeff, order=1, accept_safety=0.81, eps=1e-8): + self.h = h + self.b1 = (pcoeff + icoeff + dcoeff) / order + self.b2 = -(pcoeff + 2 * dcoeff) / order + self.b3 = dcoeff / order + self.accept_safety = accept_safety + self.eps = eps + self.errs = [] + + def limiter(self, x): + return 1 + math.atan(x - 1) + + def propose_step(self, error): + inv_error = 1 / (float(error) + self.eps) + if not self.errs: + self.errs = [inv_error, inv_error, inv_error] + self.errs[0] = inv_error + factor = self.errs[0] ** self.b1 * self.errs[1] ** self.b2 * self.errs[2] ** self.b3 + factor = self.limiter(factor) + accept = factor >= self.accept_safety + if accept: + self.errs[2] = self.errs[1] + self.errs[1] = self.errs[0] + self.h *= factor + return accept + + +class DPMSolver(nn.Module): + """DPM-Solver. 
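+
+    Works in t = -log(sigma) space; self.t(sigma) and self.sigma(t) convert
+    between the two parameterizations.
+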
See https://arxiv.org/abs/2206.00927.""" + + def __init__(self, model, extra_args=None, eps_callback=None, info_callback=None): + super().__init__() + self.model = model + self.extra_args = {} if extra_args is None else extra_args + self.eps_callback = eps_callback + self.info_callback = info_callback + + def t(self, sigma): + return -sigma.log() + + def sigma(self, t): + return t.neg().exp() + + def eps(self, eps_cache, key, x, t, *args, **kwargs): + if key in eps_cache: + return eps_cache[key], eps_cache + sigma = self.sigma(t) * x.new_ones([x.shape[0]]) + eps = (x - self.model(x, sigma, *args, **self.extra_args, **kwargs)) / self.sigma(t) + if self.eps_callback is not None: + self.eps_callback() + return eps, {key: eps, **eps_cache} + + def dpm_solver_1_step(self, x, t, t_next, eps_cache=None): + eps_cache = {} if eps_cache is None else eps_cache + h = t_next - t + eps, eps_cache = self.eps(eps_cache, 'eps', x, t) + x_1 = x - self.sigma(t_next) * h.expm1() * eps + return x_1, eps_cache + + def dpm_solver_2_step(self, x, t, t_next, r1=1 / 2, eps_cache=None): + eps_cache = {} if eps_cache is None else eps_cache + h = t_next - t + eps, eps_cache = self.eps(eps_cache, 'eps', x, t) + s1 = t + r1 * h + u1 = x - self.sigma(s1) * (r1 * h).expm1() * eps + eps_r1, eps_cache = self.eps(eps_cache, 'eps_r1', u1, s1) + x_2 = x - self.sigma(t_next) * h.expm1() * eps - self.sigma(t_next) / (2 * r1) * h.expm1() * (eps_r1 - eps) + return x_2, eps_cache + + def dpm_solver_3_step(self, x, t, t_next, r1=1 / 3, r2=2 / 3, eps_cache=None): + eps_cache = {} if eps_cache is None else eps_cache + h = t_next - t + eps, eps_cache = self.eps(eps_cache, 'eps', x, t) + s1 = t + r1 * h + s2 = t + r2 * h + u1 = x - self.sigma(s1) * (r1 * h).expm1() * eps + eps_r1, eps_cache = self.eps(eps_cache, 'eps_r1', u1, s1) + u2 = x - self.sigma(s2) * (r2 * h).expm1() * eps - self.sigma(s2) * (r2 / r1) * ((r2 * h).expm1() / (r2 * h) - 1) * (eps_r1 - eps) + eps_r2, eps_cache = self.eps(eps_cache, 'eps_r2', u2, s2) + x_3 = x - self.sigma(t_next) * h.expm1() * eps - self.sigma(t_next) / r2 * (h.expm1() / h - 1) * (eps_r2 - eps) + return x_3, eps_cache + + def dpm_solver_fast(self, x, t_start, t_end, nfe, eta=0., s_noise=1., noise_sampler=None): + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + if not t_end > t_start and eta: + raise ValueError('eta must be 0 for reverse sampling') + + m = math.floor(nfe / 3) + 1 + ts = torch.linspace(t_start, t_end, m + 1, device=x.device) + + if nfe % 3 == 0: + orders = [3] * (m - 2) + [2, 1] + else: + orders = [3] * (m - 1) + [nfe % 3] + + for i in range(len(orders)): + eps_cache = {} + t, t_next = ts[i], ts[i + 1] + if eta: + sd, su = get_ancestral_step(self.sigma(t), self.sigma(t_next), eta) + t_next_ = torch.minimum(t_end, self.t(sd)) + su = (self.sigma(t_next) ** 2 - self.sigma(t_next_) ** 2) ** 0.5 + else: + t_next_, su = t_next, 0. 
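+            # eta > 0 makes the step ancestral: solve deterministically down to
+            # the reduced level t_next_, then add back su * s_noise noise below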
+ + eps, eps_cache = self.eps(eps_cache, 'eps', x, t) + denoised = x - self.sigma(t) * eps + if self.info_callback is not None: + self.info_callback({'x': x, 'i': i, 't': ts[i], 't_up': t, 'denoised': denoised}) + + if orders[i] == 1: + x, eps_cache = self.dpm_solver_1_step(x, t, t_next_, eps_cache=eps_cache) + elif orders[i] == 2: + x, eps_cache = self.dpm_solver_2_step(x, t, t_next_, eps_cache=eps_cache) + else: + x, eps_cache = self.dpm_solver_3_step(x, t, t_next_, eps_cache=eps_cache) + + x = x + su * s_noise * noise_sampler(self.sigma(t), self.sigma(t_next)) + + return x + + def dpm_solver_adaptive(self, x, t_start, t_end, order=3, rtol=0.05, atol=0.0078, h_init=0.05, pcoeff=0., icoeff=1., dcoeff=0., accept_safety=0.81, eta=0., s_noise=1., noise_sampler=None): + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + if order not in {2, 3}: + raise ValueError('order should be 2 or 3') + forward = t_end > t_start + if not forward and eta: + raise ValueError('eta must be 0 for reverse sampling') + h_init = abs(h_init) * (1 if forward else -1) + atol = torch.tensor(atol) + rtol = torch.tensor(rtol) + s = t_start + x_prev = x + accept = True + pid = PIDStepSizeController(h_init, pcoeff, icoeff, dcoeff, 1.5 if eta else order, accept_safety) + info = {'steps': 0, 'nfe': 0, 'n_accept': 0, 'n_reject': 0} + + while s < t_end - 1e-5 if forward else s > t_end + 1e-5: + eps_cache = {} + t = torch.minimum(t_end, s + pid.h) if forward else torch.maximum(t_end, s + pid.h) + if eta: + sd, su = get_ancestral_step(self.sigma(s), self.sigma(t), eta) + t_ = torch.minimum(t_end, self.t(sd)) + su = (self.sigma(t) ** 2 - self.sigma(t_) ** 2) ** 0.5 + else: + t_, su = t, 0. + + eps, eps_cache = self.eps(eps_cache, 'eps', x, s) + denoised = x - self.sigma(s) * eps + + if order == 2: + x_low, eps_cache = self.dpm_solver_1_step(x, s, t_, eps_cache=eps_cache) + x_high, eps_cache = self.dpm_solver_2_step(x, s, t_, eps_cache=eps_cache) + else: + x_low, eps_cache = self.dpm_solver_2_step(x, s, t_, r1=1 / 3, eps_cache=eps_cache) + x_high, eps_cache = self.dpm_solver_3_step(x, s, t_, eps_cache=eps_cache) + delta = torch.maximum(atol, rtol * torch.maximum(x_low.abs(), x_prev.abs())) + error = torch.linalg.norm((x_low - x_high) / delta) / x.numel() ** 0.5 + accept = pid.propose_step(error) + if accept: + x_prev = x_low + x = x_high + su * s_noise * noise_sampler(self.sigma(s), self.sigma(t)) + s = t + info['n_accept'] += 1 + else: + info['n_reject'] += 1 + info['nfe'] += order + info['steps'] += 1 + + if self.info_callback is not None: + self.info_callback({'x': x, 'i': info['steps'] - 1, 't': s, 't_up': s, 'denoised': denoised, 'error': error, 'h': pid.h, **info}) + + return x, info + + +@torch.no_grad() +def sample_dpm_fast(model, x, sigma_min, sigma_max, n, extra_args=None, callback=None, disable=None, eta=0., s_noise=1., noise_sampler=None): + """DPM-Solver-Fast (fixed step size). 
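+
+    n is the total number of model evaluations (NFE): the solver uses mostly
+    third-order steps, with lower orders to consume any remainder.
+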
See https://arxiv.org/abs/2206.00927."""
+    if sigma_min <= 0 or sigma_max <= 0:
+        raise ValueError('sigma_min and sigma_max must be greater than 0')
+    with tqdm(total=n, disable=disable) as pbar:
+        dpm_solver = DPMSolver(model, extra_args, eps_callback=pbar.update)
+        if callback is not None:
+            dpm_solver.info_callback = lambda info: callback({'sigma': dpm_solver.sigma(info['t']), 'sigma_hat': dpm_solver.sigma(info['t_up']), **info})
+        return dpm_solver.dpm_solver_fast(x, dpm_solver.t(torch.tensor(sigma_max)), dpm_solver.t(torch.tensor(sigma_min)), n, eta, s_noise, noise_sampler)
+
+
+@torch.no_grad()
+def sample_dpm_adaptive(model, x, sigma_min, sigma_max, extra_args=None, callback=None, disable=None, order=3, rtol=0.05, atol=0.0078, h_init=0.05, pcoeff=0., icoeff=1., dcoeff=0., accept_safety=0.81, eta=0., s_noise=1., noise_sampler=None, return_info=False):
+    """DPM-Solver-12 and 23 (adaptive step size). See https://arxiv.org/abs/2206.00927."""
+    if sigma_min <= 0 or sigma_max <= 0:
+        raise ValueError('sigma_min and sigma_max must be greater than 0')
+    with tqdm(disable=disable) as pbar:
+        dpm_solver = DPMSolver(model, extra_args, eps_callback=pbar.update)
+        if callback is not None:
+            dpm_solver.info_callback = lambda info: callback({'sigma': dpm_solver.sigma(info['t']), 'sigma_hat': dpm_solver.sigma(info['t_up']), **info})
+        x, info = dpm_solver.dpm_solver_adaptive(x, dpm_solver.t(torch.tensor(sigma_max)), dpm_solver.t(torch.tensor(sigma_min)), order, rtol, atol, h_init, pcoeff, icoeff, dcoeff, accept_safety, eta, s_noise, noise_sampler)
+    if return_info:
+        return x, info
+    return x
+
+
+@torch.no_grad()
+def sample_dpmpp_2s_ancestral(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None):
+    """Ancestral sampling with DPM-Solver++(2S) second-order steps."""
+    extra_args = {} if extra_args is None else extra_args
+    noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler
+    s_in = x.new_ones([x.shape[0]])
+    sigma_fn = lambda t: t.neg().exp()
+    t_fn = lambda sigma: sigma.log().neg()
+
+    for i in trange(len(sigmas) - 1, disable=disable):
+        denoised = model(x, sigmas[i] * s_in, **extra_args)
+        sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta)
+        if callback is not None:
+            callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised})
+        if sigma_down == 0:
+            # Euler method
+            d = to_d(x, sigmas[i], denoised)
+            dt = sigma_down - sigmas[i]
+            x = x + d * dt
+        else:
+            # DPM-Solver++(2S)
+            t, t_next = t_fn(sigmas[i]), t_fn(sigma_down)
+            r = 1 / 2
+            h = t_next - t
+            s = t + r * h
+            x_2 = (sigma_fn(s) / sigma_fn(t)) * x - (-h * r).expm1() * denoised
+            denoised_2 = model(x_2, sigma_fn(s) * s_in, **extra_args)
+            x = (sigma_fn(t_next) / sigma_fn(t)) * x - (-h).expm1() * denoised_2
+        # Noise addition
+        if sigmas[i + 1] > 0:
+            x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * sigma_up
+    return x
+
+
+@torch.no_grad()
+def sample_dpmpp_sde(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None, r=1 / 2):
+    """DPM-Solver++ (stochastic)."""
+    # Guard extra_args before reading the seed from it, otherwise extra_args=None crashes.
+    extra_args = {} if extra_args is None else extra_args
+    sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max()
+    seed = extra_args.get("seed", None)
+    noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=seed, cpu=True) if noise_sampler is None else noise_sampler
+    s_in = x.new_ones([x.shape[0]])
+    sigma_fn = lambda t: t.neg().exp()
+    t_fn = lambda sigma: sigma.log().neg()
+
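+    # Each iteration below is one stochastic DPM-Solver++(2S) step: an intermediate model
+    # evaluation at s = t + r * h ("Step 1"), then the combined update ("Step 2"), with
+    # ancestral noise injected through noise_sampler whenever eta > 0.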
+    for i in trange(len(sigmas) - 1, disable=disable):
+        denoised = model(x, sigmas[i] * s_in, **extra_args)
+        if callback is not None:
+            callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised})
+        if sigmas[i + 1] == 0:
+            # Euler method
+            d = to_d(x, sigmas[i], denoised)
+            dt = sigmas[i + 1] - sigmas[i]
+            x = x + d * dt
+        else:
+            # DPM-Solver++
+            t, t_next = t_fn(sigmas[i]), t_fn(sigmas[i + 1])
+            h = t_next - t
+            s = t + h * r
+            fac = 1 / (2 * r)
+
+            # Step 1
+            sd, su = get_ancestral_step(sigma_fn(t), sigma_fn(s), eta)
+            s_ = t_fn(sd)
+            x_2 = (sigma_fn(s_) / sigma_fn(t)) * x - (t - s_).expm1() * denoised
+            x_2 = x_2 + noise_sampler(sigma_fn(t), sigma_fn(s)) * s_noise * su
+            denoised_2 = model(x_2, sigma_fn(s) * s_in, **extra_args)
+
+            # Step 2
+            sd, su = get_ancestral_step(sigma_fn(t), sigma_fn(t_next), eta)
+            t_next_ = t_fn(sd)
+            denoised_d = (1 - fac) * denoised + fac * denoised_2
+            x = (sigma_fn(t_next_) / sigma_fn(t)) * x - (t - t_next_).expm1() * denoised_d
+            x = x + noise_sampler(sigma_fn(t), sigma_fn(t_next)) * s_noise * su
+    return x
+
+
+@torch.no_grad()
+def sample_dpmpp_2m(model, x, sigmas, extra_args=None, callback=None, disable=None):
+    """DPM-Solver++(2M)."""
+    extra_args = {} if extra_args is None else extra_args
+    s_in = x.new_ones([x.shape[0]])
+    sigma_fn = lambda t: t.neg().exp()
+    t_fn = lambda sigma: sigma.log().neg()
+    old_denoised = None
+
+    for i in trange(len(sigmas) - 1, disable=disable):
+        denoised = model(x, sigmas[i] * s_in, **extra_args)
+        if callback is not None:
+            callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised})
+        t, t_next = t_fn(sigmas[i]), t_fn(sigmas[i + 1])
+        h = t_next - t
+        if old_denoised is None or sigmas[i + 1] == 0:
+            x = (sigma_fn(t_next) / sigma_fn(t)) * x - (-h).expm1() * denoised
+        else:
+            h_last = t - t_fn(sigmas[i - 1])
+            r = h_last / h
+            denoised_d = (1 + 1 / (2 * r)) * denoised - (1 / (2 * r)) * old_denoised
+            x = (sigma_fn(t_next) / sigma_fn(t)) * x - (-h).expm1() * denoised_d
+        old_denoised = denoised
+    return x
+
+
+@torch.no_grad()
+def sample_dpmpp_2m_sde(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None, solver_type='midpoint'):
+    """DPM-Solver++(2M) SDE."""
+
+    if solver_type not in {'heun', 'midpoint'}:
+        raise ValueError('solver_type must be \'heun\' or \'midpoint\'')
+
+    # Guard extra_args before reading the seed from it, otherwise extra_args=None crashes.
+    extra_args = {} if extra_args is None else extra_args
+    seed = extra_args.get("seed", None)
+    sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max()
+    noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=seed, cpu=True) if noise_sampler is None else noise_sampler
+    s_in = x.new_ones([x.shape[0]])
+
+    old_denoised = None
+    h_last = None
+    h = None
+
+    for i in trange(len(sigmas) - 1, disable=disable):
+        denoised = model(x, sigmas[i] * s_in, **extra_args)
+        if callback is not None:
+            callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised})
+        if sigmas[i + 1] == 0:
+            # Denoising step
+            x = denoised
+        else:
+            # DPM-Solver++(2M) SDE
+            t, s = -sigmas[i].log(), -sigmas[i + 1].log()
+            h = s - t
+            eta_h = eta * h
+
+            x = sigmas[i + 1] / sigmas[i] * (-eta_h).exp() * x + (-h - eta_h).expm1().neg() * denoised
+
+            if old_denoised is not None:
+                r = h_last / h
+                if solver_type == 'heun':
+                    x = x + ((-h - eta_h).expm1().neg() / (-h - eta_h) + 1) * (1 / r) * (denoised - old_denoised)
+                elif solver_type == 'midpoint':
+                    x = x + 0.5 * (-h - eta_h).expm1().neg() * (1 / r) * (denoised - old_denoised)
+
+            if eta:
+                x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * sigmas[i + 1] * (-2 * eta_h).expm1().neg().sqrt() * s_noise
+
+        old_denoised = denoised
+        h_last = h
+    return x
+
+
+@torch.no_grad()
+def sample_dpmpp_3m_sde(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None):
+    """DPM-Solver++(3M) SDE."""
+
+    # Guard extra_args before reading the seed from it, otherwise extra_args=None crashes.
+    extra_args = {} if extra_args is None else extra_args
+    seed = extra_args.get("seed", None)
+    sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max()
+    noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=seed, cpu=True) if noise_sampler is None else noise_sampler
+    s_in = x.new_ones([x.shape[0]])
+
+    denoised_1, denoised_2 = None, None
+    h, h_1, h_2 = None, None, None
+
+    for i in trange(len(sigmas) - 1, disable=disable):
+        denoised = model(x, sigmas[i] * s_in, **extra_args)
+        if callback is not None:
+            callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised})
+        if sigmas[i + 1] == 0:
+            # Denoising step
+            x = denoised
+        else:
+            t, s = -sigmas[i].log(), -sigmas[i + 1].log()
+            h = s - t
+            h_eta = h * (eta + 1)
+
+            x = torch.exp(-h_eta) * x + (-h_eta).expm1().neg() * denoised
+
+            if h_2 is not None:
+                r0 = h_1 / h
+                r1 = h_2 / h
+                d1_0 = (denoised - denoised_1) / r0
+                d1_1 = (denoised_1 - denoised_2) / r1
+                d1 = d1_0 + (d1_0 - d1_1) * r0 / (r0 + r1)
+                d2 = (d1_0 - d1_1) / (r0 + r1)
+                phi_2 = h_eta.neg().expm1() / h_eta + 1
+                phi_3 = phi_2 / h_eta - 0.5
+                x = x + phi_2 * d1 - phi_3 * d2
+            elif h_1 is not None:
+                r = h_1 / h
+                d = (denoised - denoised_1) / r
+                phi_2 = h_eta.neg().expm1() / h_eta + 1
+                x = x + phi_2 * d
+
+            if eta:
+                x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * sigmas[i + 1] * (-2 * h * eta).expm1().neg().sqrt() * s_noise
+
+        denoised_1, denoised_2 = denoised, denoised_1
+        h_1, h_2 = h, h_1
+    return x
+
+
+@torch.no_grad()
+def sample_dpmpp_3m_sde_gpu(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None):
+    extra_args = {} if extra_args is None else extra_args
+    sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max()
+    noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=extra_args.get("seed", None), cpu=False) if noise_sampler is None else noise_sampler
+    return sample_dpmpp_3m_sde(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, eta=eta, s_noise=s_noise, noise_sampler=noise_sampler)
+
+
+@torch.no_grad()
+def sample_dpmpp_2m_sde_gpu(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None, solver_type='midpoint'):
+    extra_args = {} if extra_args is None else extra_args
+    sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max()
+    noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=extra_args.get("seed", None), cpu=False) if noise_sampler is None else noise_sampler
+    return sample_dpmpp_2m_sde(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, eta=eta, s_noise=s_noise, noise_sampler=noise_sampler, solver_type=solver_type)
+
+
+@torch.no_grad()
+def sample_dpmpp_sde_gpu(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None, r=1 / 2):
+    extra_args = {} if extra_args is None else extra_args
+    sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max()
+    noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=extra_args.get("seed", None), cpu=False) if noise_sampler is None else noise_sampler
+    return sample_dpmpp_sde(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, eta=eta, s_noise=s_noise, noise_sampler=noise_sampler, r=r)
+
+
+def DDPMSampler_step(x, sigma, sigma_prev, noise, noise_sampler):
+    alpha_cumprod = 1 / ((sigma * sigma) + 1)
+    alpha_cumprod_prev = 1 / ((sigma_prev * sigma_prev) + 1)
+    alpha = (alpha_cumprod / alpha_cumprod_prev)
+
+    mu = (1.0 / alpha).sqrt() * (x - (1 - alpha) * noise / (1 - alpha_cumprod).sqrt())
+    if sigma_prev > 0:
+        mu += ((1 - alpha) * (1. - alpha_cumprod_prev) / (1. - alpha_cumprod)).sqrt() * noise_sampler(sigma, sigma_prev)
+    return mu
+
+
+def generic_step_sampler(model, x, sigmas, extra_args=None, callback=None, disable=None, noise_sampler=None, step_function=None):
+    extra_args = {} if extra_args is None else extra_args
+    noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler
+    s_in = x.new_ones([x.shape[0]])
+
+    for i in trange(len(sigmas) - 1, disable=disable):
+        denoised = model(x, sigmas[i] * s_in, **extra_args)
+        if callback is not None:
+            callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised})
+        x = step_function(x / torch.sqrt(1.0 + sigmas[i] ** 2.0), sigmas[i], sigmas[i + 1], (x - denoised) / sigmas[i], noise_sampler)
+        if sigmas[i + 1] != 0:
+            x *= torch.sqrt(1.0 + sigmas[i + 1] ** 2.0)
+    return x
+
+
+@torch.no_grad()
+def sample_ddpm(model, x, sigmas, extra_args=None, callback=None, disable=None, noise_sampler=None):
+    return generic_step_sampler(model, x, sigmas, extra_args, callback, disable, noise_sampler, DDPMSampler_step)
+
+
+@torch.no_grad()
+def sample_lcm(model, x, sigmas, extra_args=None, callback=None, disable=None, noise_sampler=None):
+    extra_args = {} if extra_args is None else extra_args
+    noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler
+    s_in = x.new_ones([x.shape[0]])
+    for i in trange(len(sigmas) - 1, disable=disable):
+        denoised = model(x, sigmas[i] * s_in, **extra_args)
+        if callback is not None:
+            callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised})
+
+        x = denoised
+        if sigmas[i + 1] > 0:
+            x += sigmas[i + 1] * noise_sampler(sigmas[i], sigmas[i + 1])
+    return x
+
+
+@torch.no_grad()
+def sample_heunpp2(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1.):
+    # From MIT licensed: https://github.com/Carzit/sd-webui-samplers-scheduler/
+    extra_args = {} if extra_args is None else extra_args
+    s_in = x.new_ones([x.shape[0]])
+    s_end = sigmas[-1]
+    for i in trange(len(sigmas) - 1, disable=disable):
+        gamma = min(s_churn / (len(sigmas) - 1), 2 ** 0.5 - 1) if s_tmin <= sigmas[i] <= s_tmax else 0.
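+        # gamma > 0 temporarily inflates sigma ("churn"), re-injecting noise in the style of
+        # the Karras et al. (2022) stochastic sampler; it stays 0 unless s_churn > 0 and
+        # s_tmin <= sigmas[i] <= s_tmax.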
+ eps = torch.randn_like(x) * s_noise + sigma_hat = sigmas[i] * (gamma + 1) + if gamma > 0: + x = x + eps * (sigma_hat ** 2 - sigmas[i] ** 2) ** 0.5 + denoised = model(x, sigma_hat * s_in, **extra_args) + d = to_d(x, sigma_hat, denoised) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised}) + dt = sigmas[i + 1] - sigma_hat + if sigmas[i + 1] == s_end: + # Euler method + x = x + d * dt + elif sigmas[i + 2] == s_end: + + # Heun's method + x_2 = x + d * dt + denoised_2 = model(x_2, sigmas[i + 1] * s_in, **extra_args) + d_2 = to_d(x_2, sigmas[i + 1], denoised_2) + + w = 2 * sigmas[0] + w2 = sigmas[i+1]/w + w1 = 1 - w2 + + d_prime = d * w1 + d_2 * w2 + + + x = x + d_prime * dt + + else: + # Heun++ + x_2 = x + d * dt + denoised_2 = model(x_2, sigmas[i + 1] * s_in, **extra_args) + d_2 = to_d(x_2, sigmas[i + 1], denoised_2) + dt_2 = sigmas[i + 2] - sigmas[i + 1] + + x_3 = x_2 + d_2 * dt_2 + denoised_3 = model(x_3, sigmas[i + 2] * s_in, **extra_args) + d_3 = to_d(x_3, sigmas[i + 2], denoised_3) + + w = 3 * sigmas[0] + w2 = sigmas[i + 1] / w + w3 = sigmas[i + 2] / w + w1 = 1 - w2 - w3 + + d_prime = w1 * d + w2 * d_2 + w3 * d_3 + x = x + d_prime * dt + return x diff --git a/ldm_patched/k_diffusion/utils.py b/ldm_patched/k_diffusion/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a644df2f3cf82b32ac6e9bf2cb7bfc70c95e05f9 --- /dev/null +++ b/ldm_patched/k_diffusion/utils.py @@ -0,0 +1,313 @@ +from contextlib import contextmanager +import hashlib +import math +from pathlib import Path +import shutil +import urllib +import warnings + +from PIL import Image +import torch +from torch import nn, optim +from torch.utils import data + + +def hf_datasets_augs_helper(examples, transform, image_key, mode='RGB'): + """Apply passed in transforms for HuggingFace Datasets.""" + images = [transform(image.convert(mode)) for image in examples[image_key]] + return {image_key: images} + + +def append_dims(x, target_dims): + """Appends dimensions to the end of a tensor until it has target_dims dimensions.""" + dims_to_append = target_dims - x.ndim + if dims_to_append < 0: + raise ValueError(f'input has {x.ndim} dims but target_dims is {target_dims}, which is less') + expanded = x[(...,) + (None,) * dims_to_append] + # MPS will get inf values if it tries to index into the new axes, but detaching fixes this. 
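+    # (detach().clone() materializes the expanded view; see the issue linked below.)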
+ # https://github.com/pytorch/pytorch/issues/84364 + return expanded.detach().clone() if expanded.device.type == 'mps' else expanded + + +def n_params(module): + """Returns the number of trainable parameters in a module.""" + return sum(p.numel() for p in module.parameters()) + + +def download_file(path, url, digest=None): + """Downloads a file if it does not exist, optionally checking its SHA-256 hash.""" + path = Path(path) + path.parent.mkdir(parents=True, exist_ok=True) + if not path.exists(): + with urllib.request.urlopen(url) as response, open(path, 'wb') as f: + shutil.copyfileobj(response, f) + if digest is not None: + file_digest = hashlib.sha256(open(path, 'rb').read()).hexdigest() + if digest != file_digest: + raise OSError(f'hash of {path} (url: {url}) failed to validate') + return path + + +@contextmanager +def train_mode(model, mode=True): + """A context manager that places a model into training mode and restores + the previous mode on exit.""" + modes = [module.training for module in model.modules()] + try: + yield model.train(mode) + finally: + for i, module in enumerate(model.modules()): + module.training = modes[i] + + +def eval_mode(model): + """A context manager that places a model into evaluation mode and restores + the previous mode on exit.""" + return train_mode(model, False) + + +@torch.no_grad() +def ema_update(model, averaged_model, decay): + """Incorporates updated model parameters into an exponential moving averaged + version of a model. It should be called after each optimizer step.""" + model_params = dict(model.named_parameters()) + averaged_params = dict(averaged_model.named_parameters()) + assert model_params.keys() == averaged_params.keys() + + for name, param in model_params.items(): + averaged_params[name].mul_(decay).add_(param, alpha=1 - decay) + + model_buffers = dict(model.named_buffers()) + averaged_buffers = dict(averaged_model.named_buffers()) + assert model_buffers.keys() == averaged_buffers.keys() + + for name, buf in model_buffers.items(): + averaged_buffers[name].copy_(buf) + + +class EMAWarmup: + """Implements an EMA warmup using an inverse decay schedule. + If inv_gamma=1 and power=1, implements a simple average. inv_gamma=1, power=2/3 are + good values for models you plan to train for a million or more steps (reaches decay + factor 0.999 at 31.6K steps, 0.9999 at 1M steps), inv_gamma=1, power=3/4 for models + you plan to train for less (reaches decay factor 0.999 at 10K steps, 0.9999 at + 215.4k steps). + Args: + inv_gamma (float): Inverse multiplicative factor of EMA warmup. Default: 1. + power (float): Exponential factor of EMA warmup. Default: 1. + min_value (float): The minimum EMA decay rate. Default: 0. + max_value (float): The maximum EMA decay rate. Default: 1. + start_at (int): The epoch to start averaging at. Default: 0. + last_epoch (int): The index of last epoch. Default: 0. + """ + + def __init__(self, inv_gamma=1., power=1., min_value=0., max_value=1., start_at=0, + last_epoch=0): + self.inv_gamma = inv_gamma + self.power = power + self.min_value = min_value + self.max_value = max_value + self.start_at = start_at + self.last_epoch = last_epoch + + def state_dict(self): + """Returns the state of the class as a :class:`dict`.""" + return dict(self.__dict__.items()) + + def load_state_dict(self, state_dict): + """Loads the class's state. + Args: + state_dict (dict): scaler state. Should be an object returned + from a call to :meth:`state_dict`. 
+ """ + self.__dict__.update(state_dict) + + def get_value(self): + """Gets the current EMA decay rate.""" + epoch = max(0, self.last_epoch - self.start_at) + value = 1 - (1 + epoch / self.inv_gamma) ** -self.power + return 0. if epoch < 0 else min(self.max_value, max(self.min_value, value)) + + def step(self): + """Updates the step count.""" + self.last_epoch += 1 + + +class InverseLR(optim.lr_scheduler._LRScheduler): + """Implements an inverse decay learning rate schedule with an optional exponential + warmup. When last_epoch=-1, sets initial lr as lr. + inv_gamma is the number of steps/epochs required for the learning rate to decay to + (1 / 2)**power of its original value. + Args: + optimizer (Optimizer): Wrapped optimizer. + inv_gamma (float): Inverse multiplicative factor of learning rate decay. Default: 1. + power (float): Exponential factor of learning rate decay. Default: 1. + warmup (float): Exponential warmup factor (0 <= warmup < 1, 0 to disable) + Default: 0. + min_lr (float): The minimum learning rate. Default: 0. + last_epoch (int): The index of last epoch. Default: -1. + verbose (bool): If ``True``, prints a message to stdout for + each update. Default: ``False``. + """ + + def __init__(self, optimizer, inv_gamma=1., power=1., warmup=0., min_lr=0., + last_epoch=-1, verbose=False): + self.inv_gamma = inv_gamma + self.power = power + if not 0. <= warmup < 1: + raise ValueError('Invalid value for warmup') + self.warmup = warmup + self.min_lr = min_lr + super().__init__(optimizer, last_epoch, verbose) + + def get_lr(self): + if not self._get_lr_called_within_step: + warnings.warn("To get the last learning rate computed by the scheduler, " + "please use `get_last_lr()`.") + + return self._get_closed_form_lr() + + def _get_closed_form_lr(self): + warmup = 1 - self.warmup ** (self.last_epoch + 1) + lr_mult = (1 + self.last_epoch / self.inv_gamma) ** -self.power + return [warmup * max(self.min_lr, base_lr * lr_mult) + for base_lr in self.base_lrs] + + +class ExponentialLR(optim.lr_scheduler._LRScheduler): + """Implements an exponential learning rate schedule with an optional exponential + warmup. When last_epoch=-1, sets initial lr as lr. Decays the learning rate + continuously by decay (default 0.5) every num_steps steps. + Args: + optimizer (Optimizer): Wrapped optimizer. + num_steps (float): The number of steps to decay the learning rate by decay in. + decay (float): The factor by which to decay the learning rate every num_steps + steps. Default: 0.5. + warmup (float): Exponential warmup factor (0 <= warmup < 1, 0 to disable) + Default: 0. + min_lr (float): The minimum learning rate. Default: 0. + last_epoch (int): The index of last epoch. Default: -1. + verbose (bool): If ``True``, prints a message to stdout for + each update. Default: ``False``. + """ + + def __init__(self, optimizer, num_steps, decay=0.5, warmup=0., min_lr=0., + last_epoch=-1, verbose=False): + self.num_steps = num_steps + self.decay = decay + if not 0. 
<= warmup < 1: + raise ValueError('Invalid value for warmup') + self.warmup = warmup + self.min_lr = min_lr + super().__init__(optimizer, last_epoch, verbose) + + def get_lr(self): + if not self._get_lr_called_within_step: + warnings.warn("To get the last learning rate computed by the scheduler, " + "please use `get_last_lr()`.") + + return self._get_closed_form_lr() + + def _get_closed_form_lr(self): + warmup = 1 - self.warmup ** (self.last_epoch + 1) + lr_mult = (self.decay ** (1 / self.num_steps)) ** self.last_epoch + return [warmup * max(self.min_lr, base_lr * lr_mult) + for base_lr in self.base_lrs] + + +def rand_log_normal(shape, loc=0., scale=1., device='cpu', dtype=torch.float32): + """Draws samples from an lognormal distribution.""" + return (torch.randn(shape, device=device, dtype=dtype) * scale + loc).exp() + + +def rand_log_logistic(shape, loc=0., scale=1., min_value=0., max_value=float('inf'), device='cpu', dtype=torch.float32): + """Draws samples from an optionally truncated log-logistic distribution.""" + min_value = torch.as_tensor(min_value, device=device, dtype=torch.float64) + max_value = torch.as_tensor(max_value, device=device, dtype=torch.float64) + min_cdf = min_value.log().sub(loc).div(scale).sigmoid() + max_cdf = max_value.log().sub(loc).div(scale).sigmoid() + u = torch.rand(shape, device=device, dtype=torch.float64) * (max_cdf - min_cdf) + min_cdf + return u.logit().mul(scale).add(loc).exp().to(dtype) + + +def rand_log_uniform(shape, min_value, max_value, device='cpu', dtype=torch.float32): + """Draws samples from an log-uniform distribution.""" + min_value = math.log(min_value) + max_value = math.log(max_value) + return (torch.rand(shape, device=device, dtype=dtype) * (max_value - min_value) + min_value).exp() + + +def rand_v_diffusion(shape, sigma_data=1., min_value=0., max_value=float('inf'), device='cpu', dtype=torch.float32): + """Draws samples from a truncated v-diffusion training timestep distribution.""" + min_cdf = math.atan(min_value / sigma_data) * 2 / math.pi + max_cdf = math.atan(max_value / sigma_data) * 2 / math.pi + u = torch.rand(shape, device=device, dtype=dtype) * (max_cdf - min_cdf) + min_cdf + return torch.tan(u * math.pi / 2) * sigma_data + + +def rand_split_log_normal(shape, loc, scale_1, scale_2, device='cpu', dtype=torch.float32): + """Draws samples from a split lognormal distribution.""" + n = torch.randn(shape, device=device, dtype=dtype).abs() + u = torch.rand(shape, device=device, dtype=dtype) + n_left = n * -scale_1 + loc + n_right = n * scale_2 + loc + ratio = scale_1 / (scale_1 + scale_2) + return torch.where(u < ratio, n_left, n_right).exp() + + +class FolderOfImages(data.Dataset): + """Recursively finds all images in a directory. 
It does not support + classes/targets.""" + + IMG_EXTENSIONS = {'.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif', '.tiff', '.webp'} + + def __init__(self, root, transform=None): + super().__init__() + self.root = Path(root) + self.transform = nn.Identity() if transform is None else transform + self.paths = sorted(path for path in self.root.rglob('*') if path.suffix.lower() in self.IMG_EXTENSIONS) + + def __repr__(self): + return f'FolderOfImages(root="{self.root}", len: {len(self)})' + + def __len__(self): + return len(self.paths) + + def __getitem__(self, key): + path = self.paths[key] + with open(path, 'rb') as f: + image = Image.open(f).convert('RGB') + image = self.transform(image) + return image, + + +class CSVLogger: + def __init__(self, filename, columns): + self.filename = Path(filename) + self.columns = columns + if self.filename.exists(): + self.file = open(self.filename, 'a') + else: + self.file = open(self.filename, 'w') + self.write(*self.columns) + + def write(self, *args): + print(*args, sep=',', file=self.file, flush=True) + + +@contextmanager +def tf32_mode(cudnn=None, matmul=None): + """A context manager that sets whether TF32 is allowed on cuDNN or matmul.""" + cudnn_old = torch.backends.cudnn.allow_tf32 + matmul_old = torch.backends.cuda.matmul.allow_tf32 + try: + if cudnn is not None: + torch.backends.cudnn.allow_tf32 = cudnn + if matmul is not None: + torch.backends.cuda.matmul.allow_tf32 = matmul + yield + finally: + if cudnn is not None: + torch.backends.cudnn.allow_tf32 = cudnn_old + if matmul is not None: + torch.backends.cuda.matmul.allow_tf32 = matmul_old diff --git a/ldm_patched/ldm/__pycache__/util.cpython-310.pyc b/ldm_patched/ldm/__pycache__/util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..16498fb6d752f04674a3222ec5ae5a803bf48e75 Binary files /dev/null and b/ldm_patched/ldm/__pycache__/util.cpython-310.pyc differ diff --git a/ldm_patched/ldm/models/__pycache__/autoencoder.cpython-310.pyc b/ldm_patched/ldm/models/__pycache__/autoencoder.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c420f265bc5392ef34bd7c70f270214b15619713 Binary files /dev/null and b/ldm_patched/ldm/models/__pycache__/autoencoder.cpython-310.pyc differ diff --git a/ldm_patched/ldm/models/autoencoder.py b/ldm_patched/ldm/models/autoencoder.py new file mode 100644 index 0000000000000000000000000000000000000000..c809a0c31de0b4bf3bbb07395257e9014b4e3435 --- /dev/null +++ b/ldm_patched/ldm/models/autoencoder.py @@ -0,0 +1,228 @@ +import torch +# import pytorch_lightning as pl +import torch.nn.functional as F +from contextlib import contextmanager +from typing import Any, Dict, List, Optional, Tuple, Union + +from ldm_patched.ldm.modules.distributions.distributions import DiagonalGaussianDistribution + +from ldm_patched.ldm.util import instantiate_from_config +from ldm_patched.ldm.modules.ema import LitEma +import ldm_patched.modules.ops + +class DiagonalGaussianRegularizer(torch.nn.Module): + def __init__(self, sample: bool = True): + super().__init__() + self.sample = sample + + def get_trainable_parameters(self) -> Any: + yield from () + + def forward(self, z: torch.Tensor) -> Tuple[torch.Tensor, dict]: + log = dict() + posterior = DiagonalGaussianDistribution(z) + if self.sample: + z = posterior.sample() + else: + z = posterior.mode() + kl_loss = posterior.kl() + kl_loss = torch.sum(kl_loss) / kl_loss.shape[0] + log["kl_loss"] = kl_loss + return z, log + + +class AbstractAutoencoder(torch.nn.Module): + """ + 
This is the base class for all autoencoders, including image autoencoders, image autoencoders
+    with discriminators, unCLIP models, etc. Hence, it is fairly general, and specific features
+    (e.g. discriminator training, encoding, decoding) must be implemented in subclasses.
+    """
+
+    def __init__(
+        self,
+        ema_decay: Union[None, float] = None,
+        monitor: Union[None, str] = None,
+        input_key: str = "jpg",
+        **kwargs,
+    ):
+        super().__init__()
+
+        self.input_key = input_key
+        self.use_ema = ema_decay is not None
+        if monitor is not None:
+            self.monitor = monitor
+
+        if self.use_ema:
+            self.model_ema = LitEma(self, decay=ema_decay)
+            # no logger is configured in this module, so report via print like the rest of ldm_patched
+            print(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.")
+
+    def get_input(self, batch) -> Any:
+        raise NotImplementedError()
+
+    def on_train_batch_end(self, *args, **kwargs):
+        # for EMA computation
+        if self.use_ema:
+            self.model_ema(self)
+
+    @contextmanager
+    def ema_scope(self, context=None):
+        if self.use_ema:
+            self.model_ema.store(self.parameters())
+            self.model_ema.copy_to(self)
+            if context is not None:
+                print(f"{context}: Switched to EMA weights")
+        try:
+            yield None
+        finally:
+            if self.use_ema:
+                self.model_ema.restore(self.parameters())
+                if context is not None:
+                    print(f"{context}: Restored training weights")
+
+    def encode(self, *args, **kwargs) -> torch.Tensor:
+        raise NotImplementedError("encode()-method of abstract base class called")
+
+    def decode(self, *args, **kwargs) -> torch.Tensor:
+        raise NotImplementedError("decode()-method of abstract base class called")
+
+    def instantiate_optimizer_from_config(self, params, lr, cfg):
+        # get_obj_from_str lives alongside instantiate_from_config in ldm_patched.ldm.util
+        from ldm_patched.ldm.util import get_obj_from_str
+        print(f"loading >>> {cfg['target']} <<< optimizer from config")
+        return get_obj_from_str(cfg["target"])(
+            params, lr=lr, **cfg.get("params", dict())
+        )
+
+    def configure_optimizers(self) -> Any:
+        raise NotImplementedError()
+
+
+class AutoencodingEngine(AbstractAutoencoder):
+    """
+    Base class for all image autoencoders that we train, like VQGAN or AutoencoderKL
+    (we also restore them explicitly as special cases for legacy reasons).
+    Regularizations such as KL or VQ are moved to the regularizer class.
+ """ + + def __init__( + self, + *args, + encoder_config: Dict, + decoder_config: Dict, + regularizer_config: Dict, + **kwargs, + ): + super().__init__(*args, **kwargs) + + self.encoder: torch.nn.Module = instantiate_from_config(encoder_config) + self.decoder: torch.nn.Module = instantiate_from_config(decoder_config) + self.regularization: AbstractRegularizer = instantiate_from_config( + regularizer_config + ) + + def get_last_layer(self): + return self.decoder.get_last_layer() + + def encode( + self, + x: torch.Tensor, + return_reg_log: bool = False, + unregularized: bool = False, + ) -> Union[torch.Tensor, Tuple[torch.Tensor, dict]]: + z = self.encoder(x) + if unregularized: + return z, dict() + z, reg_log = self.regularization(z) + if return_reg_log: + return z, reg_log + return z + + def decode(self, z: torch.Tensor, **kwargs) -> torch.Tensor: + x = self.decoder(z, **kwargs) + return x + + def forward( + self, x: torch.Tensor, **additional_decode_kwargs + ) -> Tuple[torch.Tensor, torch.Tensor, dict]: + z, reg_log = self.encode(x, return_reg_log=True) + dec = self.decode(z, **additional_decode_kwargs) + return z, dec, reg_log + + +class AutoencodingEngineLegacy(AutoencodingEngine): + def __init__(self, embed_dim: int, **kwargs): + self.max_batch_size = kwargs.pop("max_batch_size", None) + ddconfig = kwargs.pop("ddconfig") + super().__init__( + encoder_config={ + "target": "ldm_patched.ldm.modules.diffusionmodules.model.Encoder", + "params": ddconfig, + }, + decoder_config={ + "target": "ldm_patched.ldm.modules.diffusionmodules.model.Decoder", + "params": ddconfig, + }, + **kwargs, + ) + self.quant_conv = ldm_patched.modules.ops.disable_weight_init.Conv2d( + (1 + ddconfig["double_z"]) * ddconfig["z_channels"], + (1 + ddconfig["double_z"]) * embed_dim, + 1, + ) + self.post_quant_conv = ldm_patched.modules.ops.disable_weight_init.Conv2d(embed_dim, ddconfig["z_channels"], 1) + self.embed_dim = embed_dim + + def get_autoencoder_params(self) -> list: + params = super().get_autoencoder_params() + return params + + def encode( + self, x: torch.Tensor, return_reg_log: bool = False + ) -> Union[torch.Tensor, Tuple[torch.Tensor, dict]]: + if self.max_batch_size is None: + z = self.encoder(x) + z = self.quant_conv(z) + else: + N = x.shape[0] + bs = self.max_batch_size + n_batches = int(math.ceil(N / bs)) + z = list() + for i_batch in range(n_batches): + z_batch = self.encoder(x[i_batch * bs : (i_batch + 1) * bs]) + z_batch = self.quant_conv(z_batch) + z.append(z_batch) + z = torch.cat(z, 0) + + z, reg_log = self.regularization(z) + if return_reg_log: + return z, reg_log + return z + + def decode(self, z: torch.Tensor, **decoder_kwargs) -> torch.Tensor: + if self.max_batch_size is None: + dec = self.post_quant_conv(z) + dec = self.decoder(dec, **decoder_kwargs) + else: + N = z.shape[0] + bs = self.max_batch_size + n_batches = int(math.ceil(N / bs)) + dec = list() + for i_batch in range(n_batches): + dec_batch = self.post_quant_conv(z[i_batch * bs : (i_batch + 1) * bs]) + dec_batch = self.decoder(dec_batch, **decoder_kwargs) + dec.append(dec_batch) + dec = torch.cat(dec, 0) + + return dec + + +class AutoencoderKL(AutoencodingEngineLegacy): + def __init__(self, **kwargs): + if "lossconfig" in kwargs: + kwargs["loss_config"] = kwargs.pop("lossconfig") + super().__init__( + regularizer_config={ + "target": ( + "ldm_patched.ldm.models.autoencoder.DiagonalGaussianRegularizer" + ) + }, + **kwargs, + ) diff --git a/ldm_patched/ldm/modules/__pycache__/attention.cpython-310.pyc 
b/ldm_patched/ldm/modules/__pycache__/attention.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6abda13a86f4d8822845b22b55162859aec0b1e0 Binary files /dev/null and b/ldm_patched/ldm/modules/__pycache__/attention.cpython-310.pyc differ diff --git a/ldm_patched/ldm/modules/__pycache__/ema.cpython-310.pyc b/ldm_patched/ldm/modules/__pycache__/ema.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aab1e8bf62b21b12ff07108911491ff9809b7419 Binary files /dev/null and b/ldm_patched/ldm/modules/__pycache__/ema.cpython-310.pyc differ diff --git a/ldm_patched/ldm/modules/__pycache__/sub_quadratic_attention.cpython-310.pyc b/ldm_patched/ldm/modules/__pycache__/sub_quadratic_attention.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..48030cd791d2a030408654c547f28434732da03c Binary files /dev/null and b/ldm_patched/ldm/modules/__pycache__/sub_quadratic_attention.cpython-310.pyc differ diff --git a/ldm_patched/ldm/modules/attention.py b/ldm_patched/ldm/modules/attention.py new file mode 100644 index 0000000000000000000000000000000000000000..e10a868d26a4fb04292df4b6582711c660b17c40 --- /dev/null +++ b/ldm_patched/ldm/modules/attention.py @@ -0,0 +1,781 @@ +import math +import torch +import torch.nn.functional as F +from torch import nn, einsum +from einops import rearrange, repeat +from typing import Optional, Any + +from .diffusionmodules.util import checkpoint, AlphaBlender, timestep_embedding +from .sub_quadratic_attention import efficient_dot_product_attention + +from ldm_patched.modules import model_management + +if model_management.xformers_enabled(): + import xformers + import xformers.ops + +from ldm_patched.modules.args_parser import args +import ldm_patched.modules.ops +ops = ldm_patched.modules.ops.disable_weight_init + +# CrossAttn precision handling +if args.disable_attention_upcast: + print("disabling upcasting of attention") + _ATTN_PRECISION = "fp16" +else: + _ATTN_PRECISION = "fp32" + + +def exists(val): + return val is not None + + +def uniq(arr): + return{el: True for el in arr}.keys() + + +def default(val, d): + if exists(val): + return val + return d + + +def max_neg_value(t): + return -torch.finfo(t.dtype).max + + +def init_(tensor): + dim = tensor.shape[-1] + std = 1 / math.sqrt(dim) + tensor.uniform_(-std, std) + return tensor + + +# feedforward +class GEGLU(nn.Module): + def __init__(self, dim_in, dim_out, dtype=None, device=None, operations=ops): + super().__init__() + self.proj = operations.Linear(dim_in, dim_out * 2, dtype=dtype, device=device) + + def forward(self, x): + x, gate = self.proj(x).chunk(2, dim=-1) + return x * F.gelu(gate) + + +class FeedForward(nn.Module): + def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0., dtype=None, device=None, operations=ops): + super().__init__() + inner_dim = int(dim * mult) + dim_out = default(dim_out, dim) + project_in = nn.Sequential( + operations.Linear(dim, inner_dim, dtype=dtype, device=device), + nn.GELU() + ) if not glu else GEGLU(dim, inner_dim, dtype=dtype, device=device, operations=operations) + + self.net = nn.Sequential( + project_in, + nn.Dropout(dropout), + operations.Linear(inner_dim, dim_out, dtype=dtype, device=device) + ) + + def forward(self, x): + return self.net(x) + +def Normalize(in_channels, dtype=None, device=None): + return torch.nn.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True, dtype=dtype, device=device) + +def attention_basic(q, k, v, heads, mask=None): + b, _, 
dim_head = q.shape + dim_head //= heads + scale = dim_head ** -0.5 + + h = heads + q, k, v = map( + lambda t: t.unsqueeze(3) + .reshape(b, -1, heads, dim_head) + .permute(0, 2, 1, 3) + .reshape(b * heads, -1, dim_head) + .contiguous(), + (q, k, v), + ) + + # force cast to fp32 to avoid overflowing + if _ATTN_PRECISION =="fp32": + sim = einsum('b i d, b j d -> b i j', q.float(), k.float()) * scale + else: + sim = einsum('b i d, b j d -> b i j', q, k) * scale + + del q, k + + if exists(mask): + if mask.dtype == torch.bool: + mask = rearrange(mask, 'b ... -> b (...)') #TODO: check if this bool part matches pytorch attention + max_neg_value = -torch.finfo(sim.dtype).max + mask = repeat(mask, 'b j -> (b h) () j', h=h) + sim.masked_fill_(~mask, max_neg_value) + else: + sim += mask + + # attention, what we cannot get enough of + sim = sim.softmax(dim=-1) + + out = einsum('b i j, b j d -> b i d', sim.to(v.dtype), v) + out = ( + out.unsqueeze(0) + .reshape(b, heads, -1, dim_head) + .permute(0, 2, 1, 3) + .reshape(b, -1, heads * dim_head) + ) + return out + + +def attention_sub_quad(query, key, value, heads, mask=None): + b, _, dim_head = query.shape + dim_head //= heads + + scale = dim_head ** -0.5 + query = query.unsqueeze(3).reshape(b, -1, heads, dim_head).permute(0, 2, 1, 3).reshape(b * heads, -1, dim_head) + value = value.unsqueeze(3).reshape(b, -1, heads, dim_head).permute(0, 2, 1, 3).reshape(b * heads, -1, dim_head) + + key = key.unsqueeze(3).reshape(b, -1, heads, dim_head).permute(0, 2, 3, 1).reshape(b * heads, dim_head, -1) + + dtype = query.dtype + upcast_attention = _ATTN_PRECISION =="fp32" and query.dtype != torch.float32 + if upcast_attention: + bytes_per_token = torch.finfo(torch.float32).bits//8 + else: + bytes_per_token = torch.finfo(query.dtype).bits//8 + batch_x_heads, q_tokens, _ = query.shape + _, _, k_tokens = key.shape + qk_matmul_size_bytes = batch_x_heads * bytes_per_token * q_tokens * k_tokens + + mem_free_total, mem_free_torch = model_management.get_free_memory(query.device, True) + + kv_chunk_size_min = None + kv_chunk_size = None + query_chunk_size = None + + for x in [4096, 2048, 1024, 512, 256]: + count = mem_free_total / (batch_x_heads * bytes_per_token * x * 4.0) + if count >= k_tokens: + kv_chunk_size = k_tokens + query_chunk_size = x + break + + if query_chunk_size is None: + query_chunk_size = 512 + + hidden_states = efficient_dot_product_attention( + query, + key, + value, + query_chunk_size=query_chunk_size, + kv_chunk_size=kv_chunk_size, + kv_chunk_size_min=kv_chunk_size_min, + use_checkpoint=False, + upcast_attention=upcast_attention, + mask=mask, + ) + + hidden_states = hidden_states.to(dtype) + + hidden_states = hidden_states.unflatten(0, (-1, heads)).transpose(1,2).flatten(start_dim=2) + return hidden_states + +def attention_split(q, k, v, heads, mask=None): + b, _, dim_head = q.shape + dim_head //= heads + scale = dim_head ** -0.5 + + h = heads + q, k, v = map( + lambda t: t.unsqueeze(3) + .reshape(b, -1, heads, dim_head) + .permute(0, 2, 1, 3) + .reshape(b * heads, -1, dim_head) + .contiguous(), + (q, k, v), + ) + + r1 = torch.zeros(q.shape[0], q.shape[1], v.shape[2], device=q.device, dtype=q.dtype) + + mem_free_total = model_management.get_free_memory(q.device) + + if _ATTN_PRECISION =="fp32": + element_size = 4 + else: + element_size = q.element_size() + + gb = 1024 ** 3 + tensor_size = q.shape[0] * q.shape[1] * k.shape[1] * element_size + modifier = 3 + mem_required = tensor_size * modifier + steps = 1 + + + if mem_required > mem_free_total: + steps = 
2**(math.ceil(math.log(mem_required / mem_free_total, 2))) + # print(f"Expected tensor size:{tensor_size/gb:0.1f}GB, cuda free:{mem_free_cuda/gb:0.1f}GB " + # f"torch free:{mem_free_torch/gb:0.1f} total:{mem_free_total/gb:0.1f} steps:{steps}") + + if steps > 64: + max_res = math.floor(math.sqrt(math.sqrt(mem_free_total / 2.5)) / 8) * 64 + raise RuntimeError(f'Not enough memory, use lower resolution (max approx. {max_res}x{max_res}). ' + f'Need: {mem_required/64/gb:0.1f}GB free, Have:{mem_free_total/gb:0.1f}GB free') + + # print("steps", steps, mem_required, mem_free_total, modifier, q.element_size(), tensor_size) + first_op_done = False + cleared_cache = False + while True: + try: + slice_size = q.shape[1] // steps if (q.shape[1] % steps) == 0 else q.shape[1] + for i in range(0, q.shape[1], slice_size): + end = i + slice_size + if _ATTN_PRECISION =="fp32": + with torch.autocast(enabled=False, device_type = 'cuda'): + s1 = einsum('b i d, b j d -> b i j', q[:, i:end].float(), k.float()) * scale + else: + s1 = einsum('b i d, b j d -> b i j', q[:, i:end], k) * scale + + if mask is not None: + if len(mask.shape) == 2: + s1 += mask[i:end] + else: + s1 += mask[:, i:end] + + s2 = s1.softmax(dim=-1).to(v.dtype) + del s1 + first_op_done = True + + r1[:, i:end] = einsum('b i j, b j d -> b i d', s2, v) + del s2 + break + except model_management.OOM_EXCEPTION as e: + if first_op_done == False: + model_management.soft_empty_cache(True) + if cleared_cache == False: + cleared_cache = True + print("out of memory error, emptying cache and trying again") + continue + steps *= 2 + if steps > 64: + raise e + print("out of memory error, increasing steps and trying again", steps) + else: + raise e + + del q, k, v + + r1 = ( + r1.unsqueeze(0) + .reshape(b, heads, -1, dim_head) + .permute(0, 2, 1, 3) + .reshape(b, -1, heads * dim_head) + ) + return r1 + +BROKEN_XFORMERS = False +try: + x_vers = xformers.__version__ + #I think 0.0.23 is also broken (q with bs bigger than 65535 gives CUDA error) + BROKEN_XFORMERS = x_vers.startswith("0.0.21") or x_vers.startswith("0.0.22") or x_vers.startswith("0.0.23") +except: + pass + +def attention_xformers(q, k, v, heads, mask=None): + b, _, dim_head = q.shape + dim_head //= heads + if BROKEN_XFORMERS: + if b * heads > 65535: + return attention_pytorch(q, k, v, heads, mask) + + q, k, v = map( + lambda t: t.unsqueeze(3) + .reshape(b, -1, heads, dim_head) + .permute(0, 2, 1, 3) + .reshape(b * heads, -1, dim_head) + .contiguous(), + (q, k, v), + ) + + if mask is not None: + pad = 8 - q.shape[1] % 8 + mask_out = torch.empty([q.shape[0], q.shape[1], q.shape[1] + pad], dtype=q.dtype, device=q.device) + mask_out[:, :, :mask.shape[-1]] = mask + mask = mask_out[:, :, :mask.shape[-1]] + + out = xformers.ops.memory_efficient_attention(q, k, v, attn_bias=mask) + + out = ( + out.unsqueeze(0) + .reshape(b, heads, -1, dim_head) + .permute(0, 2, 1, 3) + .reshape(b, -1, heads * dim_head) + ) + return out + +def attention_pytorch(q, k, v, heads, mask=None): + b, _, dim_head = q.shape + dim_head //= heads + q, k, v = map( + lambda t: t.view(b, -1, heads, dim_head).transpose(1, 2), + (q, k, v), + ) + + out = torch.nn.functional.scaled_dot_product_attention(q, k, v, attn_mask=mask, dropout_p=0.0, is_causal=False) + out = ( + out.transpose(1, 2).reshape(b, -1, heads * dim_head) + ) + return out + + +optimized_attention = attention_basic + +if model_management.xformers_enabled(): + print("Using xformers cross attention") + optimized_attention = attention_xformers +elif 
model_management.pytorch_attention_enabled(): + print("Using pytorch cross attention") + optimized_attention = attention_pytorch +else: + if args.attention_split: + print("Using split optimization for cross attention") + optimized_attention = attention_split + else: + print("Using sub quadratic optimization for cross attention, if you have memory or speed issues try using: --attention-split") + optimized_attention = attention_sub_quad + +optimized_attention_masked = optimized_attention + +def optimized_attention_for_device(device, mask=False, small_input=False): + if small_input: + if model_management.pytorch_attention_enabled(): + return attention_pytorch #TODO: need to confirm but this is probably slightly faster for small inputs in all cases + else: + return attention_basic + + if device == torch.device("cpu"): + return attention_sub_quad + + if mask: + return optimized_attention_masked + + return optimized_attention + + +class CrossAttention(nn.Module): + def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0., dtype=None, device=None, operations=ops): + super().__init__() + inner_dim = dim_head * heads + context_dim = default(context_dim, query_dim) + + self.heads = heads + self.dim_head = dim_head + + self.to_q = operations.Linear(query_dim, inner_dim, bias=False, dtype=dtype, device=device) + self.to_k = operations.Linear(context_dim, inner_dim, bias=False, dtype=dtype, device=device) + self.to_v = operations.Linear(context_dim, inner_dim, bias=False, dtype=dtype, device=device) + + self.to_out = nn.Sequential(operations.Linear(inner_dim, query_dim, dtype=dtype, device=device), nn.Dropout(dropout)) + + def forward(self, x, context=None, value=None, mask=None): + q = self.to_q(x) + context = default(context, x) + k = self.to_k(context) + if value is not None: + v = self.to_v(value) + del value + else: + v = self.to_v(context) + + if mask is None: + out = optimized_attention(q, k, v, self.heads) + else: + out = optimized_attention_masked(q, k, v, self.heads, mask) + return self.to_out(out) + + +class BasicTransformerBlock(nn.Module): + def __init__(self, dim, n_heads, d_head, dropout=0., context_dim=None, gated_ff=True, checkpoint=True, ff_in=False, inner_dim=None, + disable_self_attn=False, disable_temporal_crossattention=False, switch_temporal_ca_to_sa=False, dtype=None, device=None, operations=ops): + super().__init__() + + self.ff_in = ff_in or inner_dim is not None + if inner_dim is None: + inner_dim = dim + + self.is_res = inner_dim == dim + + if self.ff_in: + self.norm_in = operations.LayerNorm(dim, dtype=dtype, device=device) + self.ff_in = FeedForward(dim, dim_out=inner_dim, dropout=dropout, glu=gated_ff, dtype=dtype, device=device, operations=operations) + + self.disable_self_attn = disable_self_attn + self.attn1 = CrossAttention(query_dim=inner_dim, heads=n_heads, dim_head=d_head, dropout=dropout, + context_dim=context_dim if self.disable_self_attn else None, dtype=dtype, device=device, operations=operations) # is a self-attention if not self.disable_self_attn + self.ff = FeedForward(inner_dim, dim_out=dim, dropout=dropout, glu=gated_ff, dtype=dtype, device=device, operations=operations) + + if disable_temporal_crossattention: + if switch_temporal_ca_to_sa: + raise ValueError + else: + self.attn2 = None + else: + context_dim_attn2 = None + if not switch_temporal_ca_to_sa: + context_dim_attn2 = context_dim + + self.attn2 = CrossAttention(query_dim=inner_dim, context_dim=context_dim_attn2, + heads=n_heads, dim_head=d_head, dropout=dropout, 
dtype=dtype, device=device, operations=operations) # is self-attn if context is none + self.norm2 = operations.LayerNorm(inner_dim, dtype=dtype, device=device) + + self.norm1 = operations.LayerNorm(inner_dim, dtype=dtype, device=device) + self.norm3 = operations.LayerNorm(inner_dim, dtype=dtype, device=device) + self.checkpoint = checkpoint + self.n_heads = n_heads + self.d_head = d_head + self.switch_temporal_ca_to_sa = switch_temporal_ca_to_sa + + def forward(self, x, context=None, transformer_options={}): + return checkpoint(self._forward, (x, context, transformer_options), self.parameters(), self.checkpoint) + + def _forward(self, x, context=None, transformer_options={}): + extra_options = {} + block = transformer_options.get("block", None) + block_index = transformer_options.get("block_index", 0) + transformer_patches = {} + transformer_patches_replace = {} + + for k in transformer_options: + if k == "patches": + transformer_patches = transformer_options[k] + elif k == "patches_replace": + transformer_patches_replace = transformer_options[k] + else: + extra_options[k] = transformer_options[k] + + extra_options["n_heads"] = self.n_heads + extra_options["dim_head"] = self.d_head + + if self.ff_in: + x_skip = x + x = self.ff_in(self.norm_in(x)) + if self.is_res: + x += x_skip + + n = self.norm1(x) + if self.disable_self_attn: + context_attn1 = context + else: + context_attn1 = None + value_attn1 = None + + if "attn1_patch" in transformer_patches: + patch = transformer_patches["attn1_patch"] + if context_attn1 is None: + context_attn1 = n + value_attn1 = context_attn1 + for p in patch: + n, context_attn1, value_attn1 = p(n, context_attn1, value_attn1, extra_options) + + if block is not None: + transformer_block = (block[0], block[1], block_index) + else: + transformer_block = None + attn1_replace_patch = transformer_patches_replace.get("attn1", {}) + block_attn1 = transformer_block + if block_attn1 not in attn1_replace_patch: + block_attn1 = block + + if block_attn1 in attn1_replace_patch: + if context_attn1 is None: + context_attn1 = n + value_attn1 = n + n = self.attn1.to_q(n) + context_attn1 = self.attn1.to_k(context_attn1) + value_attn1 = self.attn1.to_v(value_attn1) + n = attn1_replace_patch[block_attn1](n, context_attn1, value_attn1, extra_options) + n = self.attn1.to_out(n) + else: + n = self.attn1(n, context=context_attn1, value=value_attn1) + + if "attn1_output_patch" in transformer_patches: + patch = transformer_patches["attn1_output_patch"] + for p in patch: + n = p(n, extra_options) + + x += n + if "middle_patch" in transformer_patches: + patch = transformer_patches["middle_patch"] + for p in patch: + x = p(x, extra_options) + + if self.attn2 is not None: + n = self.norm2(x) + if self.switch_temporal_ca_to_sa: + context_attn2 = n + else: + context_attn2 = context + value_attn2 = None + if "attn2_patch" in transformer_patches: + patch = transformer_patches["attn2_patch"] + value_attn2 = context_attn2 + for p in patch: + n, context_attn2, value_attn2 = p(n, context_attn2, value_attn2, extra_options) + + attn2_replace_patch = transformer_patches_replace.get("attn2", {}) + block_attn2 = transformer_block + if block_attn2 not in attn2_replace_patch: + block_attn2 = block + + if block_attn2 in attn2_replace_patch: + if value_attn2 is None: + value_attn2 = context_attn2 + n = self.attn2.to_q(n) + context_attn2 = self.attn2.to_k(context_attn2) + value_attn2 = self.attn2.to_v(value_attn2) + n = attn2_replace_patch[block_attn2](n, context_attn2, value_attn2, extra_options) + n = 
self.attn2.to_out(n) + else: + n = self.attn2(n, context=context_attn2, value=value_attn2) + + if "attn2_output_patch" in transformer_patches: + patch = transformer_patches["attn2_output_patch"] + for p in patch: + n = p(n, extra_options) + + x += n + if self.is_res: + x_skip = x + x = self.ff(self.norm3(x)) + if self.is_res: + x += x_skip + + return x + + +class SpatialTransformer(nn.Module): + """ + Transformer block for image-like data. + First, project the input (aka embedding) + and reshape to b, t, d. + Then apply standard transformer action. + Finally, reshape to image + NEW: use_linear for more efficiency instead of the 1x1 convs + """ + def __init__(self, in_channels, n_heads, d_head, + depth=1, dropout=0., context_dim=None, + disable_self_attn=False, use_linear=False, + use_checkpoint=True, dtype=None, device=None, operations=ops): + super().__init__() + if exists(context_dim) and not isinstance(context_dim, list): + context_dim = [context_dim] * depth + self.in_channels = in_channels + inner_dim = n_heads * d_head + self.norm = operations.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True, dtype=dtype, device=device) + if not use_linear: + self.proj_in = operations.Conv2d(in_channels, + inner_dim, + kernel_size=1, + stride=1, + padding=0, dtype=dtype, device=device) + else: + self.proj_in = operations.Linear(in_channels, inner_dim, dtype=dtype, device=device) + + self.transformer_blocks = nn.ModuleList( + [BasicTransformerBlock(inner_dim, n_heads, d_head, dropout=dropout, context_dim=context_dim[d], + disable_self_attn=disable_self_attn, checkpoint=use_checkpoint, dtype=dtype, device=device, operations=operations) + for d in range(depth)] + ) + if not use_linear: + self.proj_out = operations.Conv2d(inner_dim,in_channels, + kernel_size=1, + stride=1, + padding=0, dtype=dtype, device=device) + else: + self.proj_out = operations.Linear(in_channels, inner_dim, dtype=dtype, device=device) + self.use_linear = use_linear + + def forward(self, x, context=None, transformer_options={}): + # note: if no context is given, cross-attention defaults to self-attention + if not isinstance(context, list): + context = [context] * len(self.transformer_blocks) + b, c, h, w = x.shape + x_in = x + x = self.norm(x) + if not self.use_linear: + x = self.proj_in(x) + x = rearrange(x, 'b c h w -> b (h w) c').contiguous() + if self.use_linear: + x = self.proj_in(x) + for i, block in enumerate(self.transformer_blocks): + transformer_options["block_index"] = i + x = block(x, context=context[i], transformer_options=transformer_options) + if self.use_linear: + x = self.proj_out(x) + x = rearrange(x, 'b (h w) c -> b c h w', h=h, w=w).contiguous() + if not self.use_linear: + x = self.proj_out(x) + return x + x_in + + +class SpatialVideoTransformer(SpatialTransformer): + def __init__( + self, + in_channels, + n_heads, + d_head, + depth=1, + dropout=0.0, + use_linear=False, + context_dim=None, + use_spatial_context=False, + timesteps=None, + merge_strategy: str = "fixed", + merge_factor: float = 0.5, + time_context_dim=None, + ff_in=False, + checkpoint=False, + time_depth=1, + disable_self_attn=False, + disable_temporal_crossattention=False, + max_time_embed_period: int = 10000, + dtype=None, device=None, operations=ops + ): + super().__init__( + in_channels, + n_heads, + d_head, + depth=depth, + dropout=dropout, + use_checkpoint=checkpoint, + context_dim=context_dim, + use_linear=use_linear, + disable_self_attn=disable_self_attn, + dtype=dtype, device=device, operations=operations + ) + 
self.time_depth = time_depth + self.depth = depth + self.max_time_embed_period = max_time_embed_period + + time_mix_d_head = d_head + n_time_mix_heads = n_heads + + time_mix_inner_dim = int(time_mix_d_head * n_time_mix_heads) + + inner_dim = n_heads * d_head + if use_spatial_context: + time_context_dim = context_dim + + self.time_stack = nn.ModuleList( + [ + BasicTransformerBlock( + inner_dim, + n_time_mix_heads, + time_mix_d_head, + dropout=dropout, + context_dim=time_context_dim, + # timesteps=timesteps, + checkpoint=checkpoint, + ff_in=ff_in, + inner_dim=time_mix_inner_dim, + disable_self_attn=disable_self_attn, + disable_temporal_crossattention=disable_temporal_crossattention, + dtype=dtype, device=device, operations=operations + ) + for _ in range(self.depth) + ] + ) + + assert len(self.time_stack) == len(self.transformer_blocks) + + self.use_spatial_context = use_spatial_context + self.in_channels = in_channels + + time_embed_dim = self.in_channels * 4 + self.time_pos_embed = nn.Sequential( + operations.Linear(self.in_channels, time_embed_dim, dtype=dtype, device=device), + nn.SiLU(), + operations.Linear(time_embed_dim, self.in_channels, dtype=dtype, device=device), + ) + + self.time_mixer = AlphaBlender( + alpha=merge_factor, merge_strategy=merge_strategy + ) + + def forward( + self, + x: torch.Tensor, + context: Optional[torch.Tensor] = None, + time_context: Optional[torch.Tensor] = None, + timesteps: Optional[int] = None, + image_only_indicator: Optional[torch.Tensor] = None, + transformer_options={} + ) -> torch.Tensor: + _, _, h, w = x.shape + x_in = x + spatial_context = None + if exists(context): + spatial_context = context + + if self.use_spatial_context: + assert ( + context.ndim == 3 + ), f"n dims of spatial context should be 3 but are {context.ndim}" + + if time_context is None: + time_context = context + time_context_first_timestep = time_context[::timesteps] + time_context = repeat( + time_context_first_timestep, "b ... -> (b n) ...", n=h * w + ) + elif time_context is not None and not self.use_spatial_context: + time_context = repeat(time_context, "b ... 
-> (b n) ...", n=h * w)
+            if time_context.ndim == 2:
+                time_context = rearrange(time_context, "b c -> b 1 c")
+
+        x = self.norm(x)
+        if not self.use_linear:
+            x = self.proj_in(x)
+            x = rearrange(x, "b c h w -> b (h w) c")
+        if self.use_linear:
+            x = self.proj_in(x)
+
+        num_frames = torch.arange(timesteps, device=x.device)
+        num_frames = repeat(num_frames, "t -> b t", b=x.shape[0] // timesteps)
+        num_frames = rearrange(num_frames, "b t -> (b t)")
+        t_emb = timestep_embedding(num_frames, self.in_channels, repeat_only=False, max_period=self.max_time_embed_period).to(x.dtype)
+        emb = self.time_pos_embed(t_emb)
+        emb = emb[:, None, :]
+
+        for it_, (block, mix_block) in enumerate(
+            zip(self.transformer_blocks, self.time_stack)
+        ):
+            transformer_options["block_index"] = it_
+            x = block(
+                x,
+                context=spatial_context,
+                transformer_options=transformer_options,
+            )
+
+            x_mix = x
+            x_mix = x_mix + emb
+
+            B, S, C = x_mix.shape
+            x_mix = rearrange(x_mix, "(b t) s c -> (b s) t c", t=timesteps)
+            x_mix = mix_block(x_mix, context=time_context) #TODO: transformer_options
+            x_mix = rearrange(
+                x_mix, "(b s) t c -> (b t) s c", s=S, b=B // timesteps, c=C, t=timesteps
+            )
+
+            x = self.time_mixer(x_spatial=x, x_temporal=x_mix, image_only_indicator=image_only_indicator)
+
+        if self.use_linear:
+            x = self.proj_out(x)
+            x = rearrange(x, "b (h w) c -> b c h w", h=h, w=w)
+        if not self.use_linear:
+            x = self.proj_out(x)
+        out = x + x_in
+        return out
+
+
diff --git a/ldm_patched/ldm/modules/diffusionmodules/__init__.py b/ldm_patched/ldm/modules/diffusionmodules/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ldm_patched/ldm/modules/diffusionmodules/model.py b/ldm_patched/ldm/modules/diffusionmodules/model.py
new file mode 100644
index 0000000000000000000000000000000000000000..1901145c59863b6503eb8bd28eb8c22b77219e17
--- /dev/null
+++ b/ldm_patched/ldm/modules/diffusionmodules/model.py
@@ -0,0 +1,650 @@
+# pytorch_diffusion + derived encoder decoder
+import math
+import torch
+import torch.nn as nn
+import numpy as np
+from einops import rearrange
+from typing import Optional, Any
+
+from ldm_patched.modules import model_management
+import ldm_patched.modules.ops
+ops = ldm_patched.modules.ops.disable_weight_init
+
+if model_management.xformers_enabled_vae():
+    import xformers
+    import xformers.ops
+
+def get_timestep_embedding(timesteps, embedding_dim):
+    """
+    This matches the implementation in Denoising Diffusion Probabilistic Models:
+    From Fairseq.
+    Build sinusoidal embeddings.
+    This matches the implementation in tensor2tensor, but differs slightly
+    from the description in Section 3.5 of "Attention Is All You Need".
+    """
+    assert len(timesteps.shape) == 1
+
+    half_dim = embedding_dim // 2
+    emb = math.log(10000) / (half_dim - 1)
+    emb = torch.exp(torch.arange(half_dim, dtype=torch.float32) * -emb)
+    emb = emb.to(device=timesteps.device)
+    emb = timesteps.float()[:, None] * emb[None, :]
+    emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1)
+    if embedding_dim % 2 == 1: # zero pad
+        emb = torch.nn.functional.pad(emb, (0,1,0,0))
+    return emb
+
+
+def nonlinearity(x):
+    # swish
+    return x*torch.sigmoid(x)
+
+
+def Normalize(in_channels, num_groups=32):
+    return ops.GroupNorm(num_groups=num_groups, num_channels=in_channels, eps=1e-6, affine=True)
+
+
+class Upsample(nn.Module):
+    def __init__(self, in_channels, with_conv):
+        super().__init__()
+        self.with_conv = with_conv
+        if self.with_conv:
+            self.conv = ops.Conv2d(in_channels,
+                                   in_channels,
+                                   kernel_size=3,
+                                   stride=1,
+                                   padding=1)
+
+    def forward(self, x):
+        try:
+            x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode="nearest")
+        except: #operation not implemented for bf16
+            b, c, h, w = x.shape
+            out = torch.empty((b, c, h*2, w*2), dtype=x.dtype, layout=x.layout, device=x.device)
+            split = 8
+            l = out.shape[1] // split
+            for i in range(0, out.shape[1], l):
+                out[:,i:i+l] = torch.nn.functional.interpolate(x[:,i:i+l].to(torch.float32), scale_factor=2.0, mode="nearest").to(x.dtype)
+            del x
+            x = out
+
+        if self.with_conv:
+            x = self.conv(x)
+        return x
+
+
+class Downsample(nn.Module):
+    def __init__(self, in_channels, with_conv):
+        super().__init__()
+        self.with_conv = with_conv
+        if self.with_conv:
+            # no asymmetric padding in torch conv, must do it ourselves
+            self.conv = ops.Conv2d(in_channels,
+                                   in_channels,
+                                   kernel_size=3,
+                                   stride=2,
+                                   padding=0)
+
+    def forward(self, x):
+        if self.with_conv:
+            pad = (0,1,0,1)
+            x = torch.nn.functional.pad(x, pad, mode="constant", value=0)
+            x = self.conv(x)
+        else:
+            x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2)
+        return x
+
+
+class ResnetBlock(nn.Module):
+    def __init__(self, *, in_channels, out_channels=None, conv_shortcut=False,
+                 dropout, temb_channels=512):
+        super().__init__()
+        self.in_channels = in_channels
+        out_channels = in_channels if out_channels is None else out_channels
+        self.out_channels = out_channels
+        self.use_conv_shortcut = conv_shortcut
+
+        self.swish = torch.nn.SiLU(inplace=True)
+        self.norm1 = Normalize(in_channels)
+        
self.conv1 = ops.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + if temb_channels > 0: + self.temb_proj = ops.Linear(temb_channels, + out_channels) + self.norm2 = Normalize(out_channels) + self.dropout = torch.nn.Dropout(dropout, inplace=True) + self.conv2 = ops.Conv2d(out_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + self.conv_shortcut = ops.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + else: + self.nin_shortcut = ops.Conv2d(in_channels, + out_channels, + kernel_size=1, + stride=1, + padding=0) + + def forward(self, x, temb): + h = x + h = self.norm1(h) + h = self.swish(h) + h = self.conv1(h) + + if temb is not None: + h = h + self.temb_proj(self.swish(temb))[:,:,None,None] + + h = self.norm2(h) + h = self.swish(h) + h = self.dropout(h) + h = self.conv2(h) + + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + x = self.conv_shortcut(x) + else: + x = self.nin_shortcut(x) + + return x+h + +def slice_attention(q, k, v): + r1 = torch.zeros_like(k, device=q.device) + scale = (int(q.shape[-1])**(-0.5)) + + mem_free_total = model_management.get_free_memory(q.device) + + gb = 1024 ** 3 + tensor_size = q.shape[0] * q.shape[1] * k.shape[2] * q.element_size() + modifier = 3 if q.element_size() == 2 else 2.5 + mem_required = tensor_size * modifier + steps = 1 + + if mem_required > mem_free_total: + steps = 2**(math.ceil(math.log(mem_required / mem_free_total, 2))) + + while True: + try: + slice_size = q.shape[1] // steps if (q.shape[1] % steps) == 0 else q.shape[1] + for i in range(0, q.shape[1], slice_size): + end = i + slice_size + s1 = torch.bmm(q[:, i:end], k) * scale + + s2 = torch.nn.functional.softmax(s1, dim=2).permute(0,2,1) + del s1 + + r1[:, :, i:end] = torch.bmm(v, s2) + del s2 + break + except model_management.OOM_EXCEPTION as e: + model_management.soft_empty_cache(True) + steps *= 2 + if steps > 128: + raise e + print("out of memory error, increasing steps and trying again", steps) + + return r1 + +def normal_attention(q, k, v): + # compute attention + b,c,h,w = q.shape + + q = q.reshape(b,c,h*w) + q = q.permute(0,2,1) # b,hw,c + k = k.reshape(b,c,h*w) # b,c,hw + v = v.reshape(b,c,h*w) + + r1 = slice_attention(q, k, v) + h_ = r1.reshape(b,c,h,w) + del r1 + return h_ + +def xformers_attention(q, k, v): + # compute attention + B, C, H, W = q.shape + q, k, v = map( + lambda t: t.view(B, C, -1).transpose(1, 2).contiguous(), + (q, k, v), + ) + + try: + out = xformers.ops.memory_efficient_attention(q, k, v, attn_bias=None) + out = out.transpose(1, 2).reshape(B, C, H, W) + except NotImplementedError as e: + out = slice_attention(q.view(B, -1, C), k.view(B, -1, C).transpose(1, 2), v.view(B, -1, C).transpose(1, 2)).reshape(B, C, H, W) + return out + +def pytorch_attention(q, k, v): + # compute attention + B, C, H, W = q.shape + q, k, v = map( + lambda t: t.view(B, 1, C, -1).transpose(2, 3).contiguous(), + (q, k, v), + ) + + try: + out = torch.nn.functional.scaled_dot_product_attention(q, k, v, attn_mask=None, dropout_p=0.0, is_causal=False) + out = out.transpose(2, 3).reshape(B, C, H, W) + except model_management.OOM_EXCEPTION as e: + print("scaled_dot_product_attention OOMed: switched to slice attention") + out = slice_attention(q.view(B, -1, C), k.view(B, -1, C).transpose(1, 2), v.view(B, -1, C).transpose(1, 2)).reshape(B, C, H, W) + return out + + +class AttnBlock(nn.Module): + def __init__(self, in_channels): 
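+        # Single-head self-attention over all spatial positions of the VAE
+        # feature map, with 1x1 convolutions as the q/k/v/out projections; the
+        # attention backend (xformers, PyTorch SDP, or the memory-sliced split
+        # fallback) is chosen once at construction time below.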
+ super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels) + self.q = ops.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.k = ops.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.v = ops.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.proj_out = ops.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + + if model_management.xformers_enabled_vae(): + print("Using xformers attention in VAE") + self.optimized_attention = xformers_attention + elif model_management.pytorch_attention_enabled(): + print("Using pytorch attention in VAE") + self.optimized_attention = pytorch_attention + else: + print("Using split attention in VAE") + self.optimized_attention = normal_attention + + def forward(self, x): + h_ = x + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + h_ = self.optimized_attention(q, k, v) + + h_ = self.proj_out(h_) + + return x+h_ + + +def make_attn(in_channels, attn_type="vanilla", attn_kwargs=None): + return AttnBlock(in_channels) + + +class Model(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, use_timestep=True, use_linear_attn=False, attn_type="vanilla"): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = self.ch*4 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + self.use_timestep = use_timestep + if self.use_timestep: + # timestep embedding + self.temb = nn.Module() + self.temb.dense = nn.ModuleList([ + ops.Linear(self.ch, + self.temb_ch), + ops.Linear(self.temb_ch, + self.temb_ch), + ]) + + # downsampling + self.conv_in = ops.Conv2d(in_channels, + self.ch, + kernel_size=3, + stride=1, + padding=1) + + curr_res = resolution + in_ch_mult = (1,)+tuple(ch_mult) + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch*in_ch_mult[i_level] + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions-1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch*ch_mult[i_level] + skip_in = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks+1): + if i_block == self.num_res_blocks: + skip_in = ch*in_ch_mult[i_level] + block.append(ResnetBlock(in_channels=block_in+skip_in, + out_channels=block_out, + temb_channels=self.temb_ch, + 
dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = ops.Conv2d(block_in, + out_ch, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x, t=None, context=None): + #assert x.shape[2] == x.shape[3] == self.resolution + if context is not None: + # assume aligned context, cat along channel axis + x = torch.cat((x, context), dim=1) + if self.use_timestep: + # timestep embedding + assert t is not None + temb = get_timestep_embedding(t, self.ch) + temb = self.temb.dense[0](temb) + temb = nonlinearity(temb) + temb = self.temb.dense[1](temb) + else: + temb = None + + # downsampling + hs = [self.conv_in(x)] + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions-1: + hs.append(self.down[i_level].downsample(hs[-1])) + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks+1): + h = self.up[i_level].block[i_block]( + torch.cat([h, hs.pop()], dim=1), temb) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + def get_last_layer(self): + return self.conv_out.weight + + +class Encoder(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, z_channels, double_z=True, use_linear_attn=False, attn_type="vanilla", + **ignore_kwargs): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + # downsampling + self.conv_in = ops.Conv2d(in_channels, + self.ch, + kernel_size=3, + stride=1, + padding=1) + + curr_res = resolution + in_ch_mult = (1,)+tuple(ch_mult) + self.in_ch_mult = in_ch_mult + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch*in_ch_mult[i_level] + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions-1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + 
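+        # The bottleneck follows the same ResnetBlock -> AttnBlock -> ResnetBlock
+        # pattern used by Model and Decoder at the lowest resolution, before
+        # norm_out/conv_out project to the (optionally doubled) latent channels.
+        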
self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + + # end + self.norm_out = Normalize(block_in) + self.conv_out = ops.Conv2d(block_in, + 2*z_channels if double_z else z_channels, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x): + # timestep embedding + temb = None + # downsampling + h = self.conv_in(x) + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](h, temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + if i_level != self.num_resolutions-1: + h = self.down[i_level].downsample(h) + + # middle + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class Decoder(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, z_channels, give_pre_end=False, tanh_out=False, use_linear_attn=False, + conv_out_op=ops.Conv2d, + resnet_op=ResnetBlock, + attn_op=AttnBlock, + **ignorekwargs): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.give_pre_end = give_pre_end + self.tanh_out = tanh_out + + # compute in_ch_mult, block_in and curr_res at lowest res + in_ch_mult = (1,)+tuple(ch_mult) + block_in = ch*ch_mult[self.num_resolutions-1] + curr_res = resolution // 2**(self.num_resolutions-1) + self.z_shape = (1,z_channels,curr_res,curr_res) + print("Working with z of shape {} = {} dimensions.".format( + self.z_shape, np.prod(self.z_shape))) + + # z to block_in + self.conv_in = ops.Conv2d(z_channels, + block_in, + kernel_size=3, + stride=1, + padding=1) + + # middle + self.mid = nn.Module() + self.mid.block_1 = resnet_op(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + self.mid.attn_1 = attn_op(block_in) + self.mid.block_2 = resnet_op(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks+1): + block.append(resnet_op(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(attn_op(block_in)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = conv_out_op(block_in, + out_ch, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, z, **kwargs): + #assert z.shape[1:] == self.z_shape[1:] + self.last_z_shape = z.shape + + # timestep embedding + temb = None + + # z to block_in + h = self.conv_in(z) + + # middle + h = self.mid.block_1(h, temb, **kwargs) + h = self.mid.attn_1(h, **kwargs) + h = self.mid.block_2(h, temb, **kwargs) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in 
range(self.num_res_blocks+1): + h = self.up[i_level].block[i_block](h, temb, **kwargs) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h, **kwargs) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + if self.give_pre_end: + return h + + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h, **kwargs) + if self.tanh_out: + h = torch.tanh(h) + return h diff --git a/ldm_patched/ldm/modules/diffusionmodules/openaimodel.py b/ldm_patched/ldm/modules/diffusionmodules/openaimodel.py new file mode 100644 index 0000000000000000000000000000000000000000..4b695f76ac7ef2d28e6548b0385991d353a0fa2c --- /dev/null +++ b/ldm_patched/ldm/modules/diffusionmodules/openaimodel.py @@ -0,0 +1,886 @@ +from abc import abstractmethod + +import torch as th +import torch.nn as nn +import torch.nn.functional as F +from einops import rearrange + +from .util import ( + checkpoint, + avg_pool_nd, + zero_module, + timestep_embedding, + AlphaBlender, +) +from ..attention import SpatialTransformer, SpatialVideoTransformer, default +from ldm_patched.ldm.util import exists +import ldm_patched.modules.ops +ops = ldm_patched.modules.ops.disable_weight_init + +class TimestepBlock(nn.Module): + """ + Any module where forward() takes timestep embeddings as a second argument. + """ + + @abstractmethod + def forward(self, x, emb): + """ + Apply the module to `x` given `emb` timestep embeddings. + """ + +#This is needed because accelerate makes a copy of transformer_options which breaks "transformer_index" +def forward_timestep_embed(ts, x, emb, context=None, transformer_options={}, output_shape=None, time_context=None, num_video_frames=None, image_only_indicator=None): + for layer in ts: + if isinstance(layer, VideoResBlock): + x = layer(x, emb, num_video_frames, image_only_indicator) + elif isinstance(layer, TimestepBlock): + x = layer(x, emb) + elif isinstance(layer, SpatialVideoTransformer): + x = layer(x, context, time_context, num_video_frames, image_only_indicator, transformer_options) + if "transformer_index" in transformer_options: + transformer_options["transformer_index"] += 1 + elif isinstance(layer, SpatialTransformer): + x = layer(x, context, transformer_options) + if "transformer_index" in transformer_options: + transformer_options["transformer_index"] += 1 + elif isinstance(layer, Upsample): + x = layer(x, output_shape=output_shape) + else: + x = layer(x) + return x + +class TimestepEmbedSequential(nn.Sequential, TimestepBlock): + """ + A sequential module that passes timestep embeddings to the children that + support it as an extra input. + """ + + def forward(self, *args, **kwargs): + return forward_timestep_embed(self, *args, **kwargs) + +class Upsample(nn.Module): + """ + An upsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + upsampling occurs in the inner-two dimensions. 
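+    The forward pass also accepts an optional output_shape; when given, it
+    overrides the default 2x scaling so the result matches the spatial size of
+    a corresponding skip connection exactly.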
+ """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1, dtype=None, device=None, operations=ops): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + if use_conv: + self.conv = operations.conv_nd(dims, self.channels, self.out_channels, 3, padding=padding, dtype=dtype, device=device) + + def forward(self, x, output_shape=None): + assert x.shape[1] == self.channels + if self.dims == 3: + shape = [x.shape[2], x.shape[3] * 2, x.shape[4] * 2] + if output_shape is not None: + shape[1] = output_shape[3] + shape[2] = output_shape[4] + else: + shape = [x.shape[2] * 2, x.shape[3] * 2] + if output_shape is not None: + shape[0] = output_shape[2] + shape[1] = output_shape[3] + + x = F.interpolate(x, size=shape, mode="nearest") + if self.use_conv: + x = self.conv(x) + return x + +class Downsample(nn.Module): + """ + A downsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + downsampling occurs in the inner-two dimensions. + """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1, dtype=None, device=None, operations=ops): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + stride = 2 if dims != 3 else (1, 2, 2) + if use_conv: + self.op = operations.conv_nd( + dims, self.channels, self.out_channels, 3, stride=stride, padding=padding, dtype=dtype, device=device + ) + else: + assert self.channels == self.out_channels + self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride) + + def forward(self, x): + assert x.shape[1] == self.channels + return self.op(x) + + +class ResBlock(TimestepBlock): + """ + A residual block that can optionally change the number of channels. + :param channels: the number of input channels. + :param emb_channels: the number of timestep embedding channels. + :param dropout: the rate of dropout. + :param out_channels: if specified, the number of out channels. + :param use_conv: if True and out_channels is specified, use a spatial + convolution instead of a smaller 1x1 convolution to change the + channels in the skip connection. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param use_checkpoint: if True, use gradient checkpointing on this module. + :param up: if True, use this block for upsampling. + :param down: if True, use this block for downsampling. 
+ """ + + def __init__( + self, + channels, + emb_channels, + dropout, + out_channels=None, + use_conv=False, + use_scale_shift_norm=False, + dims=2, + use_checkpoint=False, + up=False, + down=False, + kernel_size=3, + exchange_temb_dims=False, + skip_t_emb=False, + dtype=None, + device=None, + operations=ops + ): + super().__init__() + self.channels = channels + self.emb_channels = emb_channels + self.dropout = dropout + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.use_checkpoint = use_checkpoint + self.use_scale_shift_norm = use_scale_shift_norm + self.exchange_temb_dims = exchange_temb_dims + + if isinstance(kernel_size, list): + padding = [k // 2 for k in kernel_size] + else: + padding = kernel_size // 2 + + self.in_layers = nn.Sequential( + operations.GroupNorm(32, channels, dtype=dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, channels, self.out_channels, kernel_size, padding=padding, dtype=dtype, device=device), + ) + + self.updown = up or down + + if up: + self.h_upd = Upsample(channels, False, dims, dtype=dtype, device=device) + self.x_upd = Upsample(channels, False, dims, dtype=dtype, device=device) + elif down: + self.h_upd = Downsample(channels, False, dims, dtype=dtype, device=device) + self.x_upd = Downsample(channels, False, dims, dtype=dtype, device=device) + else: + self.h_upd = self.x_upd = nn.Identity() + + self.skip_t_emb = skip_t_emb + if self.skip_t_emb: + self.emb_layers = None + self.exchange_temb_dims = False + else: + self.emb_layers = nn.Sequential( + nn.SiLU(), + operations.Linear( + emb_channels, + 2 * self.out_channels if use_scale_shift_norm else self.out_channels, dtype=dtype, device=device + ), + ) + self.out_layers = nn.Sequential( + operations.GroupNorm(32, self.out_channels, dtype=dtype, device=device), + nn.SiLU(), + nn.Dropout(p=dropout), + operations.conv_nd(dims, self.out_channels, self.out_channels, kernel_size, padding=padding, dtype=dtype, device=device) + , + ) + + if self.out_channels == channels: + self.skip_connection = nn.Identity() + elif use_conv: + self.skip_connection = operations.conv_nd( + dims, channels, self.out_channels, kernel_size, padding=padding, dtype=dtype, device=device + ) + else: + self.skip_connection = operations.conv_nd(dims, channels, self.out_channels, 1, dtype=dtype, device=device) + + def forward(self, x, emb): + """ + Apply the block to a Tensor, conditioned on a timestep embedding. + :param x: an [N x C x ...] Tensor of features. + :param emb: an [N x emb_channels] Tensor of timestep embeddings. + :return: an [N x C x ...] Tensor of outputs. + """ + return checkpoint( + self._forward, (x, emb), self.parameters(), self.use_checkpoint + ) + + + def _forward(self, x, emb): + if self.updown: + in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1] + h = in_rest(x) + h = self.h_upd(h) + x = self.x_upd(x) + h = in_conv(h) + else: + h = self.in_layers(x) + + emb_out = None + if not self.skip_t_emb: + emb_out = self.emb_layers(emb).type(h.dtype) + while len(emb_out.shape) < len(h.shape): + emb_out = emb_out[..., None] + if self.use_scale_shift_norm: + out_norm, out_rest = self.out_layers[0], self.out_layers[1:] + h = out_norm(h) + if emb_out is not None: + scale, shift = th.chunk(emb_out, 2, dim=1) + h *= (1 + scale) + h += shift + h = out_rest(h) + else: + if emb_out is not None: + if self.exchange_temb_dims: + emb_out = rearrange(emb_out, "b t c ... 
-> b c t ...") + h = h + emb_out + h = self.out_layers(h) + return self.skip_connection(x) + h + + +class VideoResBlock(ResBlock): + def __init__( + self, + channels: int, + emb_channels: int, + dropout: float, + video_kernel_size=3, + merge_strategy: str = "fixed", + merge_factor: float = 0.5, + out_channels=None, + use_conv: bool = False, + use_scale_shift_norm: bool = False, + dims: int = 2, + use_checkpoint: bool = False, + up: bool = False, + down: bool = False, + dtype=None, + device=None, + operations=ops + ): + super().__init__( + channels, + emb_channels, + dropout, + out_channels=out_channels, + use_conv=use_conv, + use_scale_shift_norm=use_scale_shift_norm, + dims=dims, + use_checkpoint=use_checkpoint, + up=up, + down=down, + dtype=dtype, + device=device, + operations=operations + ) + + self.time_stack = ResBlock( + default(out_channels, channels), + emb_channels, + dropout=dropout, + dims=3, + out_channels=default(out_channels, channels), + use_scale_shift_norm=False, + use_conv=False, + up=False, + down=False, + kernel_size=video_kernel_size, + use_checkpoint=use_checkpoint, + exchange_temb_dims=True, + dtype=dtype, + device=device, + operations=operations + ) + self.time_mixer = AlphaBlender( + alpha=merge_factor, + merge_strategy=merge_strategy, + rearrange_pattern="b t -> b 1 t 1 1", + ) + + def forward( + self, + x: th.Tensor, + emb: th.Tensor, + num_video_frames: int, + image_only_indicator = None, + ) -> th.Tensor: + x = super().forward(x, emb) + + x_mix = rearrange(x, "(b t) c h w -> b c t h w", t=num_video_frames) + x = rearrange(x, "(b t) c h w -> b c t h w", t=num_video_frames) + + x = self.time_stack( + x, rearrange(emb, "(b t) ... -> b t ...", t=num_video_frames) + ) + x = self.time_mixer( + x_spatial=x_mix, x_temporal=x, image_only_indicator=image_only_indicator + ) + x = rearrange(x, "b c t h w -> (b t) c h w") + return x + + +class Timestep(nn.Module): + def __init__(self, dim): + super().__init__() + self.dim = dim + + def forward(self, t): + return timestep_embedding(t, self.dim) + +def apply_control(h, control, name): + if control is not None and name in control and len(control[name]) > 0: + ctrl = control[name].pop() + if ctrl is not None: + try: + h += ctrl + except: + print("warning control could not be applied", h.shape, ctrl.shape) + return h + +class UNetModel(nn.Module): + """ + The full UNet model with attention and timestep embedding. + :param in_channels: channels in the input Tensor. + :param model_channels: base channel count for the model. + :param out_channels: channels in the output Tensor. + :param num_res_blocks: number of residual blocks per downsample. + :param dropout: the dropout probability. + :param channel_mult: channel multiplier for each level of the UNet. + :param conv_resample: if True, use learned convolutions for upsampling and + downsampling. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param num_classes: if specified (as an int), then this model will be + class-conditional with `num_classes` classes. + :param use_checkpoint: use gradient checkpointing to reduce memory usage. + :param num_heads: the number of attention heads in each attention layer. + :param num_heads_channels: if specified, ignore num_heads and instead use + a fixed channel width per attention head. + :param num_heads_upsample: works with num_heads to set a different number + of heads for upsampling. Deprecated. + :param use_scale_shift_norm: use a FiLM-like conditioning mechanism. 
+ :param resblock_updown: use residual blocks for up/downsampling. + :param use_new_attention_order: use a different attention pattern for potentially + increased efficiency. + """ + + def __init__( + self, + image_size, + in_channels, + model_channels, + out_channels, + num_res_blocks, + dropout=0, + channel_mult=(1, 2, 4, 8), + conv_resample=True, + dims=2, + num_classes=None, + use_checkpoint=False, + dtype=th.float32, + num_heads=-1, + num_head_channels=-1, + num_heads_upsample=-1, + use_scale_shift_norm=False, + resblock_updown=False, + use_new_attention_order=False, + use_spatial_transformer=False, # custom transformer support + transformer_depth=1, # custom transformer support + context_dim=None, # custom transformer support + n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model + legacy=True, + disable_self_attentions=None, + num_attention_blocks=None, + disable_middle_self_attn=False, + use_linear_in_transformer=False, + adm_in_channels=None, + transformer_depth_middle=None, + transformer_depth_output=None, + use_temporal_resblock=False, + use_temporal_attention=False, + time_context_dim=None, + extra_ff_mix_layer=False, + use_spatial_context=False, + merge_strategy=None, + merge_factor=0.0, + video_kernel_size=None, + disable_temporal_crossattention=False, + max_ddpm_temb_period=10000, + device=None, + operations=ops, + ): + super().__init__() + + if context_dim is not None: + assert use_spatial_transformer, 'Fool!! You forgot to use the spatial transformer for your cross-attention conditioning...' + # from omegaconf.listconfig import ListConfig + # if type(context_dim) == ListConfig: + # context_dim = list(context_dim) + + if num_heads_upsample == -1: + num_heads_upsample = num_heads + + if num_heads == -1: + assert num_head_channels != -1, 'Either num_heads or num_head_channels has to be set' + + if num_head_channels == -1: + assert num_heads != -1, 'Either num_heads or num_head_channels has to be set' + + self.in_channels = in_channels + self.model_channels = model_channels + self.out_channels = out_channels + + if isinstance(num_res_blocks, int): + self.num_res_blocks = len(channel_mult) * [num_res_blocks] + else: + if len(num_res_blocks) != len(channel_mult): + raise ValueError("provide num_res_blocks either as an int (globally constant) or " + "as a list/tuple (per-level) with the same length as channel_mult") + self.num_res_blocks = num_res_blocks + + if disable_self_attentions is not None: + # should be a list of booleans, indicating whether to disable self-attention in TransformerBlocks or not + assert len(disable_self_attentions) == len(channel_mult) + if num_attention_blocks is not None: + assert len(num_attention_blocks) == len(self.num_res_blocks) + + transformer_depth = transformer_depth[:] + transformer_depth_output = transformer_depth_output[:] + + self.dropout = dropout + self.channel_mult = channel_mult + self.conv_resample = conv_resample + self.num_classes = num_classes + self.use_checkpoint = use_checkpoint + self.dtype = dtype + self.num_heads = num_heads + self.num_head_channels = num_head_channels + self.num_heads_upsample = num_heads_upsample + self.use_temporal_resblocks = use_temporal_resblock + self.predict_codebook_ids = n_embed is not None + + self.default_num_video_frames = None + self.default_image_only_indicator = None + + time_embed_dim = model_channels * 4 + self.time_embed = nn.Sequential( + operations.Linear(model_channels, time_embed_dim, dtype=self.dtype, device=device), + nn.SiLU(), + 
operations.Linear(time_embed_dim, time_embed_dim, dtype=self.dtype, device=device), + ) + + if self.num_classes is not None: + if isinstance(self.num_classes, int): + self.label_emb = nn.Embedding(num_classes, time_embed_dim, dtype=self.dtype, device=device) + elif self.num_classes == "continuous": + print("setting up linear c_adm embedding layer") + self.label_emb = nn.Linear(1, time_embed_dim) + elif self.num_classes == "sequential": + assert adm_in_channels is not None + self.label_emb = nn.Sequential( + nn.Sequential( + operations.Linear(adm_in_channels, time_embed_dim, dtype=self.dtype, device=device), + nn.SiLU(), + operations.Linear(time_embed_dim, time_embed_dim, dtype=self.dtype, device=device), + ) + ) + else: + raise ValueError() + + self.input_blocks = nn.ModuleList( + [ + TimestepEmbedSequential( + operations.conv_nd(dims, in_channels, model_channels, 3, padding=1, dtype=self.dtype, device=device) + ) + ] + ) + self._feature_size = model_channels + input_block_chans = [model_channels] + ch = model_channels + ds = 1 + + def get_attention_layer( + ch, + num_heads, + dim_head, + depth=1, + context_dim=None, + use_checkpoint=False, + disable_self_attn=False, + ): + if use_temporal_attention: + return SpatialVideoTransformer( + ch, + num_heads, + dim_head, + depth=depth, + context_dim=context_dim, + time_context_dim=time_context_dim, + dropout=dropout, + ff_in=extra_ff_mix_layer, + use_spatial_context=use_spatial_context, + merge_strategy=merge_strategy, + merge_factor=merge_factor, + checkpoint=use_checkpoint, + use_linear=use_linear_in_transformer, + disable_self_attn=disable_self_attn, + disable_temporal_crossattention=disable_temporal_crossattention, + max_time_embed_period=max_ddpm_temb_period, + dtype=self.dtype, device=device, operations=operations + ) + else: + return SpatialTransformer( + ch, num_heads, dim_head, depth=depth, context_dim=context_dim, + disable_self_attn=disable_self_attn, use_linear=use_linear_in_transformer, + use_checkpoint=use_checkpoint, dtype=self.dtype, device=device, operations=operations + ) + + def get_resblock( + merge_factor, + merge_strategy, + video_kernel_size, + ch, + time_embed_dim, + dropout, + out_channels, + dims, + use_checkpoint, + use_scale_shift_norm, + down=False, + up=False, + dtype=None, + device=None, + operations=ops + ): + if self.use_temporal_resblocks: + return VideoResBlock( + merge_factor=merge_factor, + merge_strategy=merge_strategy, + video_kernel_size=video_kernel_size, + channels=ch, + emb_channels=time_embed_dim, + dropout=dropout, + out_channels=out_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=down, + up=up, + dtype=dtype, + device=device, + operations=operations + ) + else: + return ResBlock( + channels=ch, + emb_channels=time_embed_dim, + dropout=dropout, + out_channels=out_channels, + use_checkpoint=use_checkpoint, + dims=dims, + use_scale_shift_norm=use_scale_shift_norm, + down=down, + up=up, + dtype=dtype, + device=device, + operations=operations + ) + + for level, mult in enumerate(channel_mult): + for nr in range(self.num_res_blocks[level]): + layers = [ + get_resblock( + merge_factor=merge_factor, + merge_strategy=merge_strategy, + video_kernel_size=video_kernel_size, + ch=ch, + time_embed_dim=time_embed_dim, + dropout=dropout, + out_channels=mult * model_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations, + ) + ] + ch = mult * 
model_channels + num_transformers = transformer_depth.pop(0) + if num_transformers > 0: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + #num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + if exists(disable_self_attentions): + disabled_sa = disable_self_attentions[level] + else: + disabled_sa = False + + if not exists(num_attention_blocks) or nr < num_attention_blocks[level]: + layers.append(get_attention_layer( + ch, num_heads, dim_head, depth=num_transformers, context_dim=context_dim, + disable_self_attn=disabled_sa, use_checkpoint=use_checkpoint) + ) + self.input_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + input_block_chans.append(ch) + if level != len(channel_mult) - 1: + out_ch = ch + self.input_blocks.append( + TimestepEmbedSequential( + get_resblock( + merge_factor=merge_factor, + merge_strategy=merge_strategy, + video_kernel_size=video_kernel_size, + ch=ch, + time_embed_dim=time_embed_dim, + dropout=dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=True, + dtype=self.dtype, + device=device, + operations=operations + ) + if resblock_updown + else Downsample( + ch, conv_resample, dims=dims, out_channels=out_ch, dtype=self.dtype, device=device, operations=operations + ) + ) + ) + ch = out_ch + input_block_chans.append(ch) + ds *= 2 + self._feature_size += ch + + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + #num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + mid_block = [ + get_resblock( + merge_factor=merge_factor, + merge_strategy=merge_strategy, + video_kernel_size=video_kernel_size, + ch=ch, + time_embed_dim=time_embed_dim, + dropout=dropout, + out_channels=None, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations + )] + if transformer_depth_middle >= 0: + mid_block += [get_attention_layer( # always uses a self-attn + ch, num_heads, dim_head, depth=transformer_depth_middle, context_dim=context_dim, + disable_self_attn=disable_middle_self_attn, use_checkpoint=use_checkpoint + ), + get_resblock( + merge_factor=merge_factor, + merge_strategy=merge_strategy, + video_kernel_size=video_kernel_size, + ch=ch, + time_embed_dim=time_embed_dim, + dropout=dropout, + out_channels=None, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations + )] + self.middle_block = TimestepEmbedSequential(*mid_block) + self._feature_size += ch + + self.output_blocks = nn.ModuleList([]) + for level, mult in list(enumerate(channel_mult))[::-1]: + for i in range(self.num_res_blocks[level] + 1): + ich = input_block_chans.pop() + layers = [ + get_resblock( + merge_factor=merge_factor, + merge_strategy=merge_strategy, + video_kernel_size=video_kernel_size, + ch=ch + ich, + time_embed_dim=time_embed_dim, + dropout=dropout, + out_channels=model_channels * mult, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations + ) + ] + ch = model_channels * mult + num_transformers = transformer_depth_output.pop() + if num_transformers > 0: + 
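+                    # Same head-geometry rule as in the input blocks: a fixed
+                    # num_head_channels derives the head count from the current
+                    # width ch, otherwise num_heads stays fixed and dim_head is
+                    # derived; the legacy flag recomputes dim_head as in the
+                    # original stable-diffusion UNet.
+                    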
if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + #num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + if exists(disable_self_attentions): + disabled_sa = disable_self_attentions[level] + else: + disabled_sa = False + + if not exists(num_attention_blocks) or i < num_attention_blocks[level]: + layers.append( + get_attention_layer( + ch, num_heads, dim_head, depth=num_transformers, context_dim=context_dim, + disable_self_attn=disabled_sa, use_checkpoint=use_checkpoint + ) + ) + if level and i == self.num_res_blocks[level]: + out_ch = ch + layers.append( + get_resblock( + merge_factor=merge_factor, + merge_strategy=merge_strategy, + video_kernel_size=video_kernel_size, + ch=ch, + time_embed_dim=time_embed_dim, + dropout=dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + up=True, + dtype=self.dtype, + device=device, + operations=operations + ) + if resblock_updown + else Upsample(ch, conv_resample, dims=dims, out_channels=out_ch, dtype=self.dtype, device=device, operations=operations) + ) + ds //= 2 + self.output_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + + self.out = nn.Sequential( + operations.GroupNorm(32, ch, dtype=self.dtype, device=device), + nn.SiLU(), + zero_module(operations.conv_nd(dims, model_channels, out_channels, 3, padding=1, dtype=self.dtype, device=device)), + ) + if self.predict_codebook_ids: + self.id_predictor = nn.Sequential( + operations.GroupNorm(32, ch, dtype=self.dtype, device=device), + operations.conv_nd(dims, model_channels, n_embed, 1, dtype=self.dtype, device=device), + #nn.LogSoftmax(dim=1) # change to cross_entropy and produce non-normalized logits + ) + + def forward(self, x, timesteps=None, context=None, y=None, control=None, transformer_options={}, **kwargs): + """ + Apply the model to an input batch. + :param x: an [N x C x ...] Tensor of inputs. + :param timesteps: a 1-D batch of timesteps. + :param context: conditioning plugged in via crossattn + :param y: an [N] Tensor of labels, if class-conditional. + :return: an [N x C x ...] Tensor of outputs. 
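+        :param control: optional dict of extra residuals (e.g. from a ControlNet);
+            entries under 'input', 'middle' and 'output' are popped and added to
+            the matching hidden states via apply_control().
+        :param transformer_options: dict threaded through every transformer
+            block; patch hooks registered under "patches" are invoked along
+            the way.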
+ """ + transformer_options["original_shape"] = list(x.shape) + transformer_options["transformer_index"] = 0 + transformer_patches = transformer_options.get("patches", {}) + + num_video_frames = kwargs.get("num_video_frames", self.default_num_video_frames) + image_only_indicator = kwargs.get("image_only_indicator", self.default_image_only_indicator) + time_context = kwargs.get("time_context", None) + + assert (y is not None) == ( + self.num_classes is not None + ), "must specify y if and only if the model is class-conditional" + hs = [] + t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False).to(x.dtype) + emb = self.time_embed(t_emb) + + if self.num_classes is not None: + assert y.shape[0] == x.shape[0] + emb = emb + self.label_emb(y) + + h = x + for id, module in enumerate(self.input_blocks): + transformer_options["block"] = ("input", id) + h = forward_timestep_embed(module, h, emb, context, transformer_options, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) + h = apply_control(h, control, 'input') + if "input_block_patch" in transformer_patches: + patch = transformer_patches["input_block_patch"] + for p in patch: + h = p(h, transformer_options) + + hs.append(h) + if "input_block_patch_after_skip" in transformer_patches: + patch = transformer_patches["input_block_patch_after_skip"] + for p in patch: + h = p(h, transformer_options) + + transformer_options["block"] = ("middle", 0) + h = forward_timestep_embed(self.middle_block, h, emb, context, transformer_options, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) + h = apply_control(h, control, 'middle') + + + for id, module in enumerate(self.output_blocks): + transformer_options["block"] = ("output", id) + hsp = hs.pop() + hsp = apply_control(hsp, control, 'output') + + if "output_block_patch" in transformer_patches: + patch = transformer_patches["output_block_patch"] + for p in patch: + h, hsp = p(h, hsp, transformer_options) + + h = th.cat([h, hsp], dim=1) + del hsp + if len(hs) > 0: + output_shape = hs[-1].shape + else: + output_shape = None + h = forward_timestep_embed(module, h, emb, context, transformer_options, output_shape, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) + h = h.type(x.dtype) + if self.predict_codebook_ids: + return self.id_predictor(h) + else: + return self.out(h) diff --git a/ldm_patched/ldm/modules/diffusionmodules/upscaling.py b/ldm_patched/ldm/modules/diffusionmodules/upscaling.py new file mode 100644 index 0000000000000000000000000000000000000000..a38bff57794be08f5175386384fe85d68b38536f --- /dev/null +++ b/ldm_patched/ldm/modules/diffusionmodules/upscaling.py @@ -0,0 +1,85 @@ +import torch +import torch.nn as nn +import numpy as np +from functools import partial + +from .util import extract_into_tensor, make_beta_schedule +from ldm_patched.ldm.util import default + + +class AbstractLowScaleModel(nn.Module): + # for concatenating a downsampled image to the latent representation + def __init__(self, noise_schedule_config=None): + super(AbstractLowScaleModel, self).__init__() + if noise_schedule_config is not None: + self.register_schedule(**noise_schedule_config) + + def register_schedule(self, beta_schedule="linear", timesteps=1000, + linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): + betas = make_beta_schedule(beta_schedule, timesteps, linear_start=linear_start, linear_end=linear_end, + cosine_s=cosine_s) + alphas 
= 1. - betas + alphas_cumprod = np.cumprod(alphas, axis=0) + alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1]) + + timesteps, = betas.shape + self.num_timesteps = int(timesteps) + self.linear_start = linear_start + self.linear_end = linear_end + assert alphas_cumprod.shape[0] == self.num_timesteps, 'alphas have to be defined for each timestep' + + to_torch = partial(torch.tensor, dtype=torch.float32) + + self.register_buffer('betas', to_torch(betas)) + self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod)) + self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev)) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod))) + self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod))) + self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod))) + self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod))) + self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod - 1))) + + def q_sample(self, x_start, t, noise=None, seed=None): + if noise is None: + if seed is None: + noise = torch.randn_like(x_start) + else: + noise = torch.randn(x_start.size(), dtype=x_start.dtype, layout=x_start.layout, generator=torch.manual_seed(seed)).to(x_start.device) + return (extract_into_tensor(self.sqrt_alphas_cumprod.to(x_start.device), t, x_start.shape) * x_start + + extract_into_tensor(self.sqrt_one_minus_alphas_cumprod.to(x_start.device), t, x_start.shape) * noise) + + def forward(self, x): + return x, None + + def decode(self, x): + return x + + +class SimpleImageConcat(AbstractLowScaleModel): + # no noise level conditioning + def __init__(self): + super(SimpleImageConcat, self).__init__(noise_schedule_config=None) + self.max_noise_level = 0 + + def forward(self, x): + # fix to constant noise level + return x, torch.zeros(x.shape[0], device=x.device).long() + + +class ImageConcatWithNoiseAugmentation(AbstractLowScaleModel): + def __init__(self, noise_schedule_config, max_noise_level=1000, to_cuda=False): + super().__init__(noise_schedule_config=noise_schedule_config) + self.max_noise_level = max_noise_level + + def forward(self, x, noise_level=None, seed=None): + if noise_level is None: + noise_level = torch.randint(0, self.max_noise_level, (x.shape[0],), device=x.device).long() + else: + assert isinstance(noise_level, torch.Tensor) + z = self.q_sample(x, noise_level, seed=seed) + return z, noise_level + + + diff --git a/ldm_patched/ldm/modules/diffusionmodules/util.py b/ldm_patched/ldm/modules/diffusionmodules/util.py new file mode 100644 index 0000000000000000000000000000000000000000..e261e06a3fd74fe38a0f986891d92771a6a72ab4 --- /dev/null +++ b/ldm_patched/ldm/modules/diffusionmodules/util.py @@ -0,0 +1,304 @@ +# adopted from +# https://github.com/openai/improved-diffusion/blob/main/improved_diffusion/gaussian_diffusion.py +# and +# https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py +# and +# https://github.com/openai/guided-diffusion/blob/0ba878e517b276c45d1195eb29f6f5f72659a05b/guided_diffusion/nn.py +# +# thanks! 
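+#
+# A minimal usage sketch of the helpers below (values illustrative only):
+#
+#   t = torch.tensor([0, 250, 999])
+#   emb = timestep_embedding(t, dim=320)        # -> (3, 320) sin/cos features
+#   betas = make_beta_schedule("linear", 1000)  # -> numpy array of shape (1000,)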
+ + +import os +import math +import torch +import torch.nn as nn +import numpy as np +from einops import repeat, rearrange + +from ldm_patched.ldm.util import instantiate_from_config + +class AlphaBlender(nn.Module): + strategies = ["learned", "fixed", "learned_with_images"] + + def __init__( + self, + alpha: float, + merge_strategy: str = "learned_with_images", + rearrange_pattern: str = "b t -> (b t) 1 1", + ): + super().__init__() + self.merge_strategy = merge_strategy + self.rearrange_pattern = rearrange_pattern + + assert ( + merge_strategy in self.strategies + ), f"merge_strategy needs to be in {self.strategies}" + + if self.merge_strategy == "fixed": + self.register_buffer("mix_factor", torch.Tensor([alpha])) + elif ( + self.merge_strategy == "learned" + or self.merge_strategy == "learned_with_images" + ): + self.register_parameter( + "mix_factor", torch.nn.Parameter(torch.Tensor([alpha])) + ) + else: + raise ValueError(f"unknown merge strategy {self.merge_strategy}") + + def get_alpha(self, image_only_indicator: torch.Tensor) -> torch.Tensor: + # skip_time_mix = rearrange(repeat(skip_time_mix, 'b -> (b t) () () ()', t=t), '(b t) 1 ... -> b 1 t ...', t=t) + if self.merge_strategy == "fixed": + # make shape compatible + # alpha = repeat(self.mix_factor, '1 -> b () t () ()', t=t, b=bs) + alpha = self.mix_factor.to(image_only_indicator.device) + elif self.merge_strategy == "learned": + alpha = torch.sigmoid(self.mix_factor.to(image_only_indicator.device)) + # make shape compatible + # alpha = repeat(alpha, '1 -> s () ()', s = t * bs) + elif self.merge_strategy == "learned_with_images": + assert image_only_indicator is not None, "need image_only_indicator ..." + alpha = torch.where( + image_only_indicator.bool(), + torch.ones(1, 1, device=image_only_indicator.device), + rearrange(torch.sigmoid(self.mix_factor.to(image_only_indicator.device)), "... -> ... 
1"), + ) + alpha = rearrange(alpha, self.rearrange_pattern) + # make shape compatible + # alpha = repeat(alpha, '1 -> s () ()', s = t * bs) + else: + raise NotImplementedError() + return alpha + + def forward( + self, + x_spatial, + x_temporal, + image_only_indicator=None, + ) -> torch.Tensor: + alpha = self.get_alpha(image_only_indicator) + x = ( + alpha.to(x_spatial.dtype) * x_spatial + + (1.0 - alpha).to(x_spatial.dtype) * x_temporal + ) + return x + + +def make_beta_schedule(schedule, n_timestep, linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): + if schedule == "linear": + betas = ( + torch.linspace(linear_start ** 0.5, linear_end ** 0.5, n_timestep, dtype=torch.float64) ** 2 + ) + + elif schedule == "cosine": + timesteps = ( + torch.arange(n_timestep + 1, dtype=torch.float64) / n_timestep + cosine_s + ) + alphas = timesteps / (1 + cosine_s) * np.pi / 2 + alphas = torch.cos(alphas).pow(2) + alphas = alphas / alphas[0] + betas = 1 - alphas[1:] / alphas[:-1] + betas = np.clip(betas, a_min=0, a_max=0.999) + + elif schedule == "squaredcos_cap_v2": # used for karlo prior + # return early + return betas_for_alpha_bar( + n_timestep, + lambda t: math.cos((t + 0.008) / 1.008 * math.pi / 2) ** 2, + ) + + elif schedule == "sqrt_linear": + betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) + elif schedule == "sqrt": + betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) ** 0.5 + else: + raise ValueError(f"schedule '{schedule}' unknown.") + return betas.numpy() + + +def make_ddim_timesteps(ddim_discr_method, num_ddim_timesteps, num_ddpm_timesteps, verbose=True): + if ddim_discr_method == 'uniform': + c = num_ddpm_timesteps // num_ddim_timesteps + ddim_timesteps = np.asarray(list(range(0, num_ddpm_timesteps, c))) + elif ddim_discr_method == 'quad': + ddim_timesteps = ((np.linspace(0, np.sqrt(num_ddpm_timesteps * .8), num_ddim_timesteps)) ** 2).astype(int) + else: + raise NotImplementedError(f'There is no ddim discretization method called "{ddim_discr_method}"') + + # assert ddim_timesteps.shape[0] == num_ddim_timesteps + # add one to get the final alpha values right (the ones from first scale to data during sampling) + steps_out = ddim_timesteps + 1 + if verbose: + print(f'Selected timesteps for ddim sampler: {steps_out}') + return steps_out + + +def make_ddim_sampling_parameters(alphacums, ddim_timesteps, eta, verbose=True): + # select alphas for computing the variance schedule + alphas = alphacums[ddim_timesteps] + alphas_prev = np.asarray([alphacums[0]] + alphacums[ddim_timesteps[:-1]].tolist()) + + # according the the formula provided in https://arxiv.org/abs/2010.02502 + sigmas = eta * np.sqrt((1 - alphas_prev) / (1 - alphas) * (1 - alphas / alphas_prev)) + if verbose: + print(f'Selected alphas for ddim sampler: a_t: {alphas}; a_(t-1): {alphas_prev}') + print(f'For the chosen value of eta, which is {eta}, ' + f'this results in the following sigma_t schedule for ddim sampler {sigmas}') + return sigmas, alphas, alphas_prev + + +def betas_for_alpha_bar(num_diffusion_timesteps, alpha_bar, max_beta=0.999): + """ + Create a beta schedule that discretizes the given alpha_t_bar function, + which defines the cumulative product of (1-beta) over time from t = [0,1]. + :param num_diffusion_timesteps: the number of betas to produce. + :param alpha_bar: a lambda that takes an argument t from 0 to 1 and + produces the cumulative product of (1-beta) up to that + part of the diffusion process. 
+ :param max_beta: the maximum beta to use; use values lower than 1 to + prevent singularities. + """ + betas = [] + for i in range(num_diffusion_timesteps): + t1 = i / num_diffusion_timesteps + t2 = (i + 1) / num_diffusion_timesteps + betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta)) + return np.array(betas) + + +def extract_into_tensor(a, t, x_shape): + b, *_ = t.shape + out = a.gather(-1, t) + return out.reshape(b, *((1,) * (len(x_shape) - 1))) + + +def checkpoint(func, inputs, params, flag): + """ + Evaluate a function without caching intermediate activations, allowing for + reduced memory at the expense of extra compute in the backward pass. + :param func: the function to evaluate. + :param inputs: the argument sequence to pass to `func`. + :param params: a sequence of parameters `func` depends on but does not + explicitly take as arguments. + :param flag: if False, disable gradient checkpointing. + """ + if flag: + args = tuple(inputs) + tuple(params) + return CheckpointFunction.apply(func, len(inputs), *args) + else: + return func(*inputs) + + +class CheckpointFunction(torch.autograd.Function): + @staticmethod + def forward(ctx, run_function, length, *args): + ctx.run_function = run_function + ctx.input_tensors = list(args[:length]) + ctx.input_params = list(args[length:]) + ctx.gpu_autocast_kwargs = {"enabled": torch.is_autocast_enabled(), + "dtype": torch.get_autocast_gpu_dtype(), + "cache_enabled": torch.is_autocast_cache_enabled()} + with torch.no_grad(): + output_tensors = ctx.run_function(*ctx.input_tensors) + return output_tensors + + @staticmethod + def backward(ctx, *output_grads): + ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors] + with torch.enable_grad(), \ + torch.cuda.amp.autocast(**ctx.gpu_autocast_kwargs): + # Fixes a bug where the first op in run_function modifies the + # Tensor storage in place, which is not allowed for detach()'d + # Tensors. + shallow_copies = [x.view_as(x) for x in ctx.input_tensors] + output_tensors = ctx.run_function(*shallow_copies) + input_grads = torch.autograd.grad( + output_tensors, + ctx.input_tensors + ctx.input_params, + output_grads, + allow_unused=True, + ) + del ctx.input_tensors + del ctx.input_params + del output_tensors + return (None, None) + input_grads + + +def timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False): + """ + Create sinusoidal timestep embeddings. + :param timesteps: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an [N x dim] Tensor of positional embeddings. + """ + if not repeat_only: + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32, device=timesteps.device) / half + ) + args = timesteps[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) + else: + embedding = repeat(timesteps, 'b -> b d', d=dim) + return embedding + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().zero_() + return module + + +def scale_module(module, scale): + """ + Scale the parameters of a module and return it. 
+ """ + for p in module.parameters(): + p.detach().mul_(scale) + return module + + +def mean_flat(tensor): + """ + Take the mean over all non-batch dimensions. + """ + return tensor.mean(dim=list(range(1, len(tensor.shape)))) + + +def avg_pool_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D average pooling module. + """ + if dims == 1: + return nn.AvgPool1d(*args, **kwargs) + elif dims == 2: + return nn.AvgPool2d(*args, **kwargs) + elif dims == 3: + return nn.AvgPool3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +class HybridConditioner(nn.Module): + + def __init__(self, c_concat_config, c_crossattn_config): + super().__init__() + self.concat_conditioner = instantiate_from_config(c_concat_config) + self.crossattn_conditioner = instantiate_from_config(c_crossattn_config) + + def forward(self, c_concat, c_crossattn): + c_concat = self.concat_conditioner(c_concat) + c_crossattn = self.crossattn_conditioner(c_crossattn) + return {'c_concat': [c_concat], 'c_crossattn': [c_crossattn]} + + +def noise_like(shape, device, repeat=False): + repeat_noise = lambda: torch.randn((1, *shape[1:]), device=device).repeat(shape[0], *((1,) * (len(shape) - 1))) + noise = lambda: torch.randn(shape, device=device) + return repeat_noise() if repeat else noise() diff --git a/ldm_patched/ldm/modules/distributions/__init__.py b/ldm_patched/ldm/modules/distributions/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ldm_patched/ldm/modules/distributions/__pycache__/__init__.cpython-310.pyc b/ldm_patched/ldm/modules/distributions/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5516c3bb83bb108ea5b638a8b8ef008343ff29ea Binary files /dev/null and b/ldm_patched/ldm/modules/distributions/__pycache__/__init__.cpython-310.pyc differ diff --git a/ldm_patched/ldm/modules/distributions/__pycache__/distributions.cpython-310.pyc b/ldm_patched/ldm/modules/distributions/__pycache__/distributions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0336392c611eed65a734fb206145d6cc8ea624a Binary files /dev/null and b/ldm_patched/ldm/modules/distributions/__pycache__/distributions.cpython-310.pyc differ diff --git a/ldm_patched/ldm/modules/distributions/distributions.py b/ldm_patched/ldm/modules/distributions/distributions.py new file mode 100644 index 0000000000000000000000000000000000000000..f2b8ef901130efc171aa69742ca0244d94d3f2e9 --- /dev/null +++ b/ldm_patched/ldm/modules/distributions/distributions.py @@ -0,0 +1,92 @@ +import torch +import numpy as np + + +class AbstractDistribution: + def sample(self): + raise NotImplementedError() + + def mode(self): + raise NotImplementedError() + + +class DiracDistribution(AbstractDistribution): + def __init__(self, value): + self.value = value + + def sample(self): + return self.value + + def mode(self): + return self.value + + +class DiagonalGaussianDistribution(object): + def __init__(self, parameters, deterministic=False): + self.parameters = parameters + self.mean, self.logvar = torch.chunk(parameters, 2, dim=1) + self.logvar = torch.clamp(self.logvar, -30.0, 20.0) + self.deterministic = deterministic + self.std = torch.exp(0.5 * self.logvar) + self.var = torch.exp(self.logvar) + if self.deterministic: + self.var = self.std = torch.zeros_like(self.mean).to(device=self.parameters.device) + + def sample(self): + x = self.mean + self.std * 
torch.randn(self.mean.shape).to(device=self.parameters.device) + return x + + def kl(self, other=None): + if self.deterministic: + return torch.Tensor([0.]) + else: + if other is None: + return 0.5 * torch.sum(torch.pow(self.mean, 2) + + self.var - 1.0 - self.logvar, + dim=[1, 2, 3]) + else: + return 0.5 * torch.sum( + torch.pow(self.mean - other.mean, 2) / other.var + + self.var / other.var - 1.0 - self.logvar + other.logvar, + dim=[1, 2, 3]) + + def nll(self, sample, dims=[1,2,3]): + if self.deterministic: + return torch.Tensor([0.]) + logtwopi = np.log(2.0 * np.pi) + return 0.5 * torch.sum( + logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var, + dim=dims) + + def mode(self): + return self.mean + + +def normal_kl(mean1, logvar1, mean2, logvar2): + """ + source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12 + Compute the KL divergence between two gaussians. + Shapes are automatically broadcasted, so batches can be compared to + scalars, among other use cases. + """ + tensor = None + for obj in (mean1, logvar1, mean2, logvar2): + if isinstance(obj, torch.Tensor): + tensor = obj + break + assert tensor is not None, "at least one argument must be a Tensor" + + # Force variances to be Tensors. Broadcasting helps convert scalars to + # Tensors, but it does not work for torch.exp(). + logvar1, logvar2 = [ + x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor) + for x in (logvar1, logvar2) + ] + + return 0.5 * ( + -1.0 + + logvar2 + - logvar1 + + torch.exp(logvar1 - logvar2) + + ((mean1 - mean2) ** 2) * torch.exp(-logvar2) + ) diff --git a/ldm_patched/ldm/modules/ema.py b/ldm_patched/ldm/modules/ema.py new file mode 100644 index 0000000000000000000000000000000000000000..bded25019b9bcbcd0260f0b8185f8c7859ca58c4 --- /dev/null +++ b/ldm_patched/ldm/modules/ema.py @@ -0,0 +1,80 @@ +import torch +from torch import nn + + +class LitEma(nn.Module): + def __init__(self, model, decay=0.9999, use_num_upates=True): + super().__init__() + if decay < 0.0 or decay > 1.0: + raise ValueError('Decay must be between 0 and 1') + + self.m_name2s_name = {} + self.register_buffer('decay', torch.tensor(decay, dtype=torch.float32)) + self.register_buffer('num_updates', torch.tensor(0, dtype=torch.int) if use_num_upates + else torch.tensor(-1, dtype=torch.int)) + + for name, p in model.named_parameters(): + if p.requires_grad: + # remove as '.'-character is not allowed in buffers + s_name = name.replace('.', '') + self.m_name2s_name.update({name: s_name}) + self.register_buffer(s_name, p.clone().detach().data) + + self.collected_params = [] + + def reset_num_updates(self): + del self.num_updates + self.register_buffer('num_updates', torch.tensor(0, dtype=torch.int)) + + def forward(self, model): + decay = self.decay + + if self.num_updates >= 0: + self.num_updates += 1 + decay = min(self.decay, (1 + self.num_updates) / (10 + self.num_updates)) + + one_minus_decay = 1.0 - decay + + with torch.no_grad(): + m_param = dict(model.named_parameters()) + shadow_params = dict(self.named_buffers()) + + for key in m_param: + if m_param[key].requires_grad: + sname = self.m_name2s_name[key] + shadow_params[sname] = shadow_params[sname].type_as(m_param[key]) + shadow_params[sname].sub_(one_minus_decay * (shadow_params[sname] - m_param[key])) + else: + assert not key in self.m_name2s_name + + def copy_to(self, model): + m_param = dict(model.named_parameters()) + shadow_params = dict(self.named_buffers()) + for key in m_param: + if 
m_param[key].requires_grad: + m_param[key].data.copy_(shadow_params[self.m_name2s_name[key]].data) + else: + assert not key in self.m_name2s_name + + def store(self, parameters): + """ + Save the current parameters for restoring later. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + temporarily stored. + """ + self.collected_params = [param.clone() for param in parameters] + + def restore(self, parameters): + """ + Restore the parameters stored with the `store` method. + Useful to validate the model with EMA parameters without affecting the + original optimization process. Store the parameters before the + `copy_to` method. After validation (or model saving), use this to + restore the former parameters. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + updated with the stored parameters. + """ + for c_param, param in zip(self.collected_params, parameters): + param.data.copy_(c_param.data) diff --git a/ldm_patched/ldm/modules/encoders/__init__.py b/ldm_patched/ldm/modules/encoders/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ldm_patched/ldm/modules/encoders/__pycache__/__init__.cpython-310.pyc b/ldm_patched/ldm/modules/encoders/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..82ba55033e0ddf1c10ef596ec56a4a4a899320a8 Binary files /dev/null and b/ldm_patched/ldm/modules/encoders/__pycache__/__init__.cpython-310.pyc differ diff --git a/ldm_patched/ldm/modules/encoders/__pycache__/noise_aug_modules.cpython-310.pyc b/ldm_patched/ldm/modules/encoders/__pycache__/noise_aug_modules.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ec7b2f9a8c747d1b8a6965b139438b94fad9c1ad Binary files /dev/null and b/ldm_patched/ldm/modules/encoders/__pycache__/noise_aug_modules.cpython-310.pyc differ diff --git a/ldm_patched/ldm/modules/encoders/noise_aug_modules.py b/ldm_patched/ldm/modules/encoders/noise_aug_modules.py new file mode 100644 index 0000000000000000000000000000000000000000..a5d8660301636fde75808cba50afa539cf1162e0 --- /dev/null +++ b/ldm_patched/ldm/modules/encoders/noise_aug_modules.py @@ -0,0 +1,35 @@ +from ..diffusionmodules.upscaling import ImageConcatWithNoiseAugmentation +from ..diffusionmodules.openaimodel import Timestep +import torch + +class CLIPEmbeddingNoiseAugmentation(ImageConcatWithNoiseAugmentation): + def __init__(self, *args, clip_stats_path=None, timestep_dim=256, **kwargs): + super().__init__(*args, **kwargs) + if clip_stats_path is None: + clip_mean, clip_std = torch.zeros(timestep_dim), torch.ones(timestep_dim) + else: + clip_mean, clip_std = torch.load(clip_stats_path, map_location="cpu") + self.register_buffer("data_mean", clip_mean[None, :], persistent=False) + self.register_buffer("data_std", clip_std[None, :], persistent=False) + self.time_embed = Timestep(timestep_dim) + + def scale(self, x): + # re-normalize to centered mean and unit variance + x = (x - self.data_mean.to(x.device)) * 1. 
/ self.data_std.to(x.device)
+        return x
+
+    def unscale(self, x):
+        # back to original data stats
+        x = (x * self.data_std.to(x.device)) + self.data_mean.to(x.device)
+        return x
+
+    def forward(self, x, noise_level=None, seed=None):
+        if noise_level is None:
+            noise_level = torch.randint(0, self.max_noise_level, (x.shape[0],), device=x.device).long()
+        else:
+            assert isinstance(noise_level, torch.Tensor)
+        x = self.scale(x)
+        z = self.q_sample(x, noise_level, seed=seed)
+        z = self.unscale(z)
+        noise_level = self.time_embed(noise_level)
+        return z, noise_level
diff --git a/ldm_patched/ldm/modules/sub_quadratic_attention.py b/ldm_patched/ldm/modules/sub_quadratic_attention.py new file mode 100644 index 0000000000000000000000000000000000000000..9f4c23c7ecdac38ef4d7c200cb342a04f3fd0cfb --- /dev/null +++ b/ldm_patched/ldm/modules/sub_quadratic_attention.py @@ -0,0 +1,273 @@
+# original source:
+#   https://github.com/AminRezaei0x443/memory-efficient-attention/blob/1bc0d9e6ac5f82ea43a375135c4e1d3896ee1694/memory_efficient_attention/attention_torch.py
+# license:
+#   MIT
+# credit:
+#   Amin Rezaei (original author)
+#   Alex Birch (optimized algorithm for 3D tensors, at the expense of removing bias, masking and callbacks)
+# implementation of:
+#   "Self-attention Does Not Need O(n^2) Memory":
+#   https://arxiv.org/abs/2112.05682v2
+
+from functools import partial
+import torch
+from torch import Tensor
+from torch.utils.checkpoint import checkpoint
+import math
+
+try:
+    from typing import Optional, NamedTuple, List, Protocol
+except ImportError:
+    from typing import Optional, NamedTuple, List
+    from typing_extensions import Protocol
+
+from ldm_patched.modules import model_management
+
+def dynamic_slice(
+    x: Tensor,
+    starts: List[int],
+    sizes: List[int],
+) -> Tensor:
+    slicing = [slice(start, start + size) for start, size in zip(starts, sizes)]
+    return x[slicing]
+
+class AttnChunk(NamedTuple):
+    exp_values: Tensor
+    exp_weights_sum: Tensor
+    max_score: Tensor
+
+class SummarizeChunk(Protocol):
+    @staticmethod
+    def __call__(
+        query: Tensor,
+        key_t: Tensor,
+        value: Tensor,
+    ) -> AttnChunk: ...
+
+class ComputeQueryChunkAttn(Protocol):
+    @staticmethod
+    def __call__(
+        query: Tensor,
+        key_t: Tensor,
+        value: Tensor,
+    ) -> Tensor: ...
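+
+# How the two helpers below fit together (a sketch, not upstream documentation):
+# _summarize_chunk computes, for one key/value chunk, the triple
+# (exp_values, exp_weights_sum, max_score) of a max-shifted softmax, and
+# _query_chunk_attention merges the per-chunk triples with the log-sum-exp trick:
+#
+#   m   = max_i(max_score_i)
+#   out = sum_i(exp(max_score_i - m) * exp_values_i)
+#         / sum_i(exp(max_score_i - m) * exp_weights_sum_i)
+#
+# i.e. every chunk is renormalized to the global maximum score before the
+# weighted sums are combined, keeping the streaming softmax numerically stable.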
+ +def _summarize_chunk( + query: Tensor, + key_t: Tensor, + value: Tensor, + scale: float, + upcast_attention: bool, + mask, +) -> AttnChunk: + if upcast_attention: + with torch.autocast(enabled=False, device_type = 'cuda'): + query = query.float() + key_t = key_t.float() + attn_weights = torch.baddbmm( + torch.empty(1, 1, 1, device=query.device, dtype=query.dtype), + query, + key_t, + alpha=scale, + beta=0, + ) + else: + attn_weights = torch.baddbmm( + torch.empty(1, 1, 1, device=query.device, dtype=query.dtype), + query, + key_t, + alpha=scale, + beta=0, + ) + max_score, _ = torch.max(attn_weights, -1, keepdim=True) + max_score = max_score.detach() + attn_weights -= max_score + if mask is not None: + attn_weights += mask + torch.exp(attn_weights, out=attn_weights) + exp_weights = attn_weights.to(value.dtype) + exp_values = torch.bmm(exp_weights, value) + max_score = max_score.squeeze(-1) + return AttnChunk(exp_values, exp_weights.sum(dim=-1), max_score) + +def _query_chunk_attention( + query: Tensor, + key_t: Tensor, + value: Tensor, + summarize_chunk: SummarizeChunk, + kv_chunk_size: int, + mask, +) -> Tensor: + batch_x_heads, k_channels_per_head, k_tokens = key_t.shape + _, _, v_channels_per_head = value.shape + + def chunk_scanner(chunk_idx: int, mask) -> AttnChunk: + key_chunk = dynamic_slice( + key_t, + (0, 0, chunk_idx), + (batch_x_heads, k_channels_per_head, kv_chunk_size) + ) + value_chunk = dynamic_slice( + value, + (0, chunk_idx, 0), + (batch_x_heads, kv_chunk_size, v_channels_per_head) + ) + if mask is not None: + mask = mask[:,:,chunk_idx:chunk_idx + kv_chunk_size] + + return summarize_chunk(query, key_chunk, value_chunk, mask=mask) + + chunks: List[AttnChunk] = [ + chunk_scanner(chunk, mask) for chunk in torch.arange(0, k_tokens, kv_chunk_size) + ] + acc_chunk = AttnChunk(*map(torch.stack, zip(*chunks))) + chunk_values, chunk_weights, chunk_max = acc_chunk + + global_max, _ = torch.max(chunk_max, 0, keepdim=True) + max_diffs = torch.exp(chunk_max - global_max) + chunk_values *= torch.unsqueeze(max_diffs, -1) + chunk_weights *= max_diffs + + all_values = chunk_values.sum(dim=0) + all_weights = torch.unsqueeze(chunk_weights, -1).sum(dim=0) + return all_values / all_weights + +# TODO: refactor CrossAttention#get_attention_scores to share code with this +def _get_attention_scores_no_kv_chunking( + query: Tensor, + key_t: Tensor, + value: Tensor, + scale: float, + upcast_attention: bool, + mask, +) -> Tensor: + if upcast_attention: + with torch.autocast(enabled=False, device_type = 'cuda'): + query = query.float() + key_t = key_t.float() + attn_scores = torch.baddbmm( + torch.empty(1, 1, 1, device=query.device, dtype=query.dtype), + query, + key_t, + alpha=scale, + beta=0, + ) + else: + attn_scores = torch.baddbmm( + torch.empty(1, 1, 1, device=query.device, dtype=query.dtype), + query, + key_t, + alpha=scale, + beta=0, + ) + + if mask is not None: + attn_scores += mask + try: + attn_probs = attn_scores.softmax(dim=-1) + del attn_scores + except model_management.OOM_EXCEPTION: + print("ran out of memory while running softmax in _get_attention_scores_no_kv_chunking, trying slower in place softmax instead") + attn_scores -= attn_scores.max(dim=-1, keepdim=True).values + torch.exp(attn_scores, out=attn_scores) + summed = torch.sum(attn_scores, dim=-1, keepdim=True) + attn_scores /= summed + attn_probs = attn_scores + + hidden_states_slice = torch.bmm(attn_probs.to(value.dtype), value) + return hidden_states_slice + +class ScannedChunk(NamedTuple): + chunk_idx: int + attn_chunk: 
AttnChunk
+
+def efficient_dot_product_attention(
+    query: Tensor,
+    key_t: Tensor,
+    value: Tensor,
+    query_chunk_size=1024,
+    kv_chunk_size: Optional[int] = None,
+    kv_chunk_size_min: Optional[int] = None,
+    use_checkpoint=True,
+    upcast_attention=False,
+    mask=None,
+):
+    """Computes efficient dot-product attention given query, transposed key, and value.
+    This is an efficient version of the attention mechanism presented in
+    https://arxiv.org/abs/2112.05682v2, which comes with O(sqrt(n)) memory requirements.
+    Args:
+      query: queries for calculating attention with shape of
+        `[batch * num_heads, tokens, channels_per_head]`.
+      key_t: keys for calculating attention with shape of
+        `[batch * num_heads, channels_per_head, tokens]`.
+      value: values to be used in attention with shape of
+        `[batch * num_heads, tokens, channels_per_head]`.
+      query_chunk_size: int: query chunk size
+      kv_chunk_size: Optional[int]: key/value chunk size. if None: defaults to sqrt(key_tokens)
+      kv_chunk_size_min: Optional[int]: key/value minimum chunk size. only considered when kv_chunk_size is None. changes `sqrt(key_tokens)` into `max(sqrt(key_tokens), kv_chunk_size_min)`, to ensure our chunk sizes don't get too small (smaller chunks = more chunks = less concurrent work done).
+      use_checkpoint: bool: whether to use checkpointing (recommended True for training, False for inference)
+    Returns:
+      Output of shape `[batch * num_heads, query_tokens, channels_per_head]`.
+    """
+    batch_x_heads, q_tokens, q_channels_per_head = query.shape
+    _, _, k_tokens = key_t.shape
+    scale = q_channels_per_head ** -0.5
+
+    kv_chunk_size = min(kv_chunk_size or int(math.sqrt(k_tokens)), k_tokens)
+    if kv_chunk_size_min is not None:
+        kv_chunk_size = max(kv_chunk_size, kv_chunk_size_min)
+
+    if mask is not None and len(mask.shape) == 2:
+        mask = mask.unsqueeze(0)
+
+    def get_query_chunk(chunk_idx: int) -> Tensor:
+        return dynamic_slice(
+            query,
+            (0, chunk_idx, 0),
+            (batch_x_heads, min(query_chunk_size, q_tokens), q_channels_per_head)
+        )
+
+    def get_mask_chunk(chunk_idx: int) -> Optional[Tensor]:
+        if mask is None:
+            return None
+        chunk = min(query_chunk_size, q_tokens)
+        return mask[:, chunk_idx:chunk_idx + chunk]
+
+    summarize_chunk: SummarizeChunk = partial(_summarize_chunk, scale=scale, upcast_attention=upcast_attention)
+    summarize_chunk: SummarizeChunk = partial(checkpoint, summarize_chunk) if use_checkpoint else summarize_chunk
+    # fast-path for when there's just 1 key-value chunk per query chunk (this is just sliced attention btw)
+    compute_query_chunk_attn: ComputeQueryChunkAttn = partial(
+        _get_attention_scores_no_kv_chunking,
+        scale=scale,
+        upcast_attention=upcast_attention
+    ) if k_tokens <= kv_chunk_size else (
+        partial(
+            _query_chunk_attention,
+            kv_chunk_size=kv_chunk_size,
+            summarize_chunk=summarize_chunk,
+        )
+    )
+
+    if q_tokens <= query_chunk_size:
+        # fast-path for when there's just 1 query chunk
+        return compute_query_chunk_attn(
+            query=query,
+            key_t=key_t,
+            value=value,
+            mask=mask,
+        )
+
+    # TODO: maybe we should use torch.empty_like(query) to allocate storage in-advance,
+    # and pass slices to be mutated, instead of torch.cat()ing the returned slices
+    res = torch.cat([
+        compute_query_chunk_attn(
+            query=get_query_chunk(i * query_chunk_size),
+            key_t=key_t,
+            value=value,
+            mask=get_mask_chunk(i * query_chunk_size)
+        ) for i in range(math.ceil(q_tokens / query_chunk_size))
+    ], dim=1)
+    return res
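+
+# A rough usage sketch (illustrative only; the shapes and head count below are
+# assumptions, not values used anywhere in this module):
+#
+#   q   = torch.randn(2 * 8, 4096, 64)   # [batch * heads, q_tokens, channels_per_head]
+#   k_t = torch.randn(2 * 8, 64, 4096)   # [batch * heads, channels_per_head, k_tokens]
+#   v   = torch.randn(2 * 8, 4096, 64)   # [batch * heads, k_tokens, channels_per_head]
+#   out = efficient_dot_product_attention(q, k_t, v, query_chunk_size=1024, use_checkpoint=False)
+#   # out: [16, 4096, 64]
diff --git a/ldm_patched/ldm/modules/temporal_ae.py b/ldm_patched/ldm/modules/temporal_ae.py new file mode 100644 index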
0000000000000000000000000000000000000000..ee851921187f44c63a2459ce8461347dc0f8d8d1 --- /dev/null +++ b/ldm_patched/ldm/modules/temporal_ae.py @@ -0,0 +1,245 @@ +import functools +from typing import Callable, Iterable, Union + +import torch +from einops import rearrange, repeat + +import ldm_patched.modules.ops +ops = ldm_patched.modules.ops.disable_weight_init + +from .diffusionmodules.model import ( + AttnBlock, + Decoder, + ResnetBlock, +) +from .diffusionmodules.openaimodel import ResBlock, timestep_embedding +from .attention import BasicTransformerBlock + +def partialclass(cls, *args, **kwargs): + class NewCls(cls): + __init__ = functools.partialmethod(cls.__init__, *args, **kwargs) + + return NewCls + + +class VideoResBlock(ResnetBlock): + def __init__( + self, + out_channels, + *args, + dropout=0.0, + video_kernel_size=3, + alpha=0.0, + merge_strategy="learned", + **kwargs, + ): + super().__init__(out_channels=out_channels, dropout=dropout, *args, **kwargs) + if video_kernel_size is None: + video_kernel_size = [3, 1, 1] + self.time_stack = ResBlock( + channels=out_channels, + emb_channels=0, + dropout=dropout, + dims=3, + use_scale_shift_norm=False, + use_conv=False, + up=False, + down=False, + kernel_size=video_kernel_size, + use_checkpoint=False, + skip_t_emb=True, + ) + + self.merge_strategy = merge_strategy + if self.merge_strategy == "fixed": + self.register_buffer("mix_factor", torch.Tensor([alpha])) + elif self.merge_strategy == "learned": + self.register_parameter( + "mix_factor", torch.nn.Parameter(torch.Tensor([alpha])) + ) + else: + raise ValueError(f"unknown merge strategy {self.merge_strategy}") + + def get_alpha(self, bs): + if self.merge_strategy == "fixed": + return self.mix_factor + elif self.merge_strategy == "learned": + return torch.sigmoid(self.mix_factor) + else: + raise NotImplementedError() + + def forward(self, x, temb, skip_video=False, timesteps=None): + b, c, h, w = x.shape + if timesteps is None: + timesteps = b + + x = super().forward(x, temb) + + if not skip_video: + x_mix = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps) + + x = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps) + + x = self.time_stack(x, temb) + + alpha = self.get_alpha(bs=b // timesteps).to(x.device) + x = alpha * x + (1.0 - alpha) * x_mix + + x = rearrange(x, "b c t h w -> (b t) c h w") + return x + + +class AE3DConv(ops.Conv2d): + def __init__(self, in_channels, out_channels, video_kernel_size=3, *args, **kwargs): + super().__init__(in_channels, out_channels, *args, **kwargs) + if isinstance(video_kernel_size, Iterable): + padding = [int(k // 2) for k in video_kernel_size] + else: + padding = int(video_kernel_size // 2) + + self.time_mix_conv = ops.Conv3d( + in_channels=out_channels, + out_channels=out_channels, + kernel_size=video_kernel_size, + padding=padding, + ) + + def forward(self, input, timesteps=None, skip_video=False): + if timesteps is None: + timesteps = input.shape[0] + x = super().forward(input) + if skip_video: + return x + x = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps) + x = self.time_mix_conv(x) + return rearrange(x, "b c t h w -> (b t) c h w") + + +class AttnVideoBlock(AttnBlock): + def __init__( + self, in_channels: int, alpha: float = 0, merge_strategy: str = "learned" + ): + super().__init__(in_channels) + # no context, single headed, as in base class + self.time_mix_block = BasicTransformerBlock( + dim=in_channels, + n_heads=1, + d_head=in_channels, + checkpoint=False, + ff_in=True, + ) + + time_embed_dim = self.in_channels * 4 + 
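        # frame-position embedder: in forward(), timestep_embedding() produces
+        # sinusoidal per-frame codes of width in_channels, and this MLP widens
+        # them to time_embed_dim and projects back before they are added to x_mix
+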
self.video_time_embed = torch.nn.Sequential( + ops.Linear(self.in_channels, time_embed_dim), + torch.nn.SiLU(), + ops.Linear(time_embed_dim, self.in_channels), + ) + + self.merge_strategy = merge_strategy + if self.merge_strategy == "fixed": + self.register_buffer("mix_factor", torch.Tensor([alpha])) + elif self.merge_strategy == "learned": + self.register_parameter( + "mix_factor", torch.nn.Parameter(torch.Tensor([alpha])) + ) + else: + raise ValueError(f"unknown merge strategy {self.merge_strategy}") + + def forward(self, x, timesteps=None, skip_time_block=False): + if skip_time_block: + return super().forward(x) + + if timesteps is None: + timesteps = x.shape[0] + + x_in = x + x = self.attention(x) + h, w = x.shape[2:] + x = rearrange(x, "b c h w -> b (h w) c") + + x_mix = x + num_frames = torch.arange(timesteps, device=x.device) + num_frames = repeat(num_frames, "t -> b t", b=x.shape[0] // timesteps) + num_frames = rearrange(num_frames, "b t -> (b t)") + t_emb = timestep_embedding(num_frames, self.in_channels, repeat_only=False) + emb = self.video_time_embed(t_emb) # b, n_channels + emb = emb[:, None, :] + x_mix = x_mix + emb + + alpha = self.get_alpha().to(x.device) + x_mix = self.time_mix_block(x_mix, timesteps=timesteps) + x = alpha * x + (1.0 - alpha) * x_mix # alpha merge + + x = rearrange(x, "b (h w) c -> b c h w", h=h, w=w) + x = self.proj_out(x) + + return x_in + x + + def get_alpha( + self, + ): + if self.merge_strategy == "fixed": + return self.mix_factor + elif self.merge_strategy == "learned": + return torch.sigmoid(self.mix_factor) + else: + raise NotImplementedError(f"unknown merge strategy {self.merge_strategy}") + + + +def make_time_attn( + in_channels, + attn_type="vanilla", + attn_kwargs=None, + alpha: float = 0, + merge_strategy: str = "learned", +): + return partialclass( + AttnVideoBlock, in_channels, alpha=alpha, merge_strategy=merge_strategy + ) + + +class Conv2DWrapper(torch.nn.Conv2d): + def forward(self, input: torch.Tensor, **kwargs) -> torch.Tensor: + return super().forward(input) + + +class VideoDecoder(Decoder): + available_time_modes = ["all", "conv-only", "attn-only"] + + def __init__( + self, + *args, + video_kernel_size: Union[int, list] = 3, + alpha: float = 0.0, + merge_strategy: str = "learned", + time_mode: str = "conv-only", + **kwargs, + ): + self.video_kernel_size = video_kernel_size + self.alpha = alpha + self.merge_strategy = merge_strategy + self.time_mode = time_mode + assert ( + self.time_mode in self.available_time_modes + ), f"time_mode parameter has to be in {self.available_time_modes}" + + if self.time_mode != "attn-only": + kwargs["conv_out_op"] = partialclass(AE3DConv, video_kernel_size=self.video_kernel_size) + if self.time_mode not in ["conv-only", "only-last-conv"]: + kwargs["attn_op"] = partialclass(make_time_attn, alpha=self.alpha, merge_strategy=self.merge_strategy) + if self.time_mode not in ["attn-only", "only-last-conv"]: + kwargs["resnet_op"] = partialclass(VideoResBlock, video_kernel_size=self.video_kernel_size, alpha=self.alpha, merge_strategy=self.merge_strategy) + + super().__init__(*args, **kwargs) + + def get_last_layer(self, skip_time_mix=False, **kwargs): + if self.time_mode == "attn-only": + raise NotImplementedError("TODO") + else: + return ( + self.conv_out.time_mix_conv.weight + if not skip_time_mix + else self.conv_out.weight + ) diff --git a/ldm_patched/ldm/util.py b/ldm_patched/ldm/util.py new file mode 100644 index 0000000000000000000000000000000000000000..8c09ca1c72f7ceb3f9d7f9546aae5561baf62b13 --- 
/dev/null +++ b/ldm_patched/ldm/util.py @@ -0,0 +1,197 @@
+import importlib
+
+import torch
+from torch import optim
+import numpy as np
+
+from inspect import isfunction
+from PIL import Image, ImageDraw, ImageFont
+
+
+def log_txt_as_img(wh, xc, size=10):
+    # wh a tuple of (width, height)
+    # xc a list of captions to plot
+    b = len(xc)
+    txts = list()
+    for bi in range(b):
+        txt = Image.new("RGB", wh, color="white")
+        draw = ImageDraw.Draw(txt)
+        font = ImageFont.truetype('data/DejaVuSans.ttf', size=size)
+        nc = int(40 * (wh[0] / 256))
+        lines = "\n".join(xc[bi][start:start + nc] for start in range(0, len(xc[bi]), nc))
+
+        try:
+            draw.text((0, 0), lines, fill="black", font=font)
+        except UnicodeEncodeError:
+            print("Can't encode string for logging. Skipping.")
+
+        txt = np.array(txt).transpose(2, 0, 1) / 127.5 - 1.0
+        txts.append(txt)
+    txts = np.stack(txts)
+    txts = torch.tensor(txts)
+    return txts
+
+
+def ismap(x):
+    if not isinstance(x, torch.Tensor):
+        return False
+    return (len(x.shape) == 4) and (x.shape[1] > 3)
+
+
+def isimage(x):
+    if not isinstance(x, torch.Tensor):
+        return False
+    return (len(x.shape) == 4) and (x.shape[1] == 3 or x.shape[1] == 1)
+
+
+def exists(x):
+    return x is not None
+
+
+def default(val, d):
+    if exists(val):
+        return val
+    return d() if isfunction(d) else d
+
+
+def mean_flat(tensor):
+    """
+    https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/nn.py#L86
+    Take the mean over all non-batch dimensions.
+    """
+    return tensor.mean(dim=list(range(1, len(tensor.shape))))
+
+
+def count_params(model, verbose=False):
+    total_params = sum(p.numel() for p in model.parameters())
+    if verbose:
+        print(f"{model.__class__.__name__} has {total_params*1.e-6:.2f} M params.")
+    return total_params
+
+
+def instantiate_from_config(config):
+    if "target" not in config:
+        if config == '__is_first_stage__':
+            return None
+        elif config == "__is_unconditional__":
+            return None
+        raise KeyError("Expected key `target` to instantiate.")
+    return get_obj_from_str(config["target"])(**config.get("params", dict()))
+
+
+def get_obj_from_str(string, reload=False):
+    module, cls = string.rsplit(".", 1)
+    if reload:
+        module_imp = importlib.import_module(module)
+        importlib.reload(module_imp)
+    return getattr(importlib.import_module(module, package=None), cls)
+
+
+class AdamWwithEMAandWings(optim.Optimizer):
+    # credit to https://gist.github.com/crowsonkb/65f7265353f403714fce3b2595e0b298
+    def __init__(self, params, lr=1.e-3, betas=(0.9, 0.999), eps=1.e-8,  # TODO: check hyperparameters before using
+                 weight_decay=1.e-2, amsgrad=False, ema_decay=0.9999,  # ema decay to match previous code
+                 ema_power=1., param_names=()):
+        """AdamW that saves EMA versions of the parameters."""
+        if not 0.0 <= lr:
+            raise ValueError("Invalid learning rate: {}".format(lr))
+        if not 0.0 <= eps:
+            raise ValueError("Invalid epsilon value: {}".format(eps))
+        if not 0.0 <= betas[0] < 1.0:
+            raise ValueError("Invalid beta parameter at index 0: {}".format(betas[0]))
+        if not 0.0 <= betas[1] < 1.0:
+            raise ValueError("Invalid beta parameter at index 1: {}".format(betas[1]))
+        if not 0.0 <= weight_decay:
+            raise ValueError("Invalid weight_decay value: {}".format(weight_decay))
+        if not 0.0 <= ema_decay <= 1.0:
+            raise ValueError("Invalid ema_decay value: {}".format(ema_decay))
+        defaults = dict(lr=lr, betas=betas, eps=eps,
+                        weight_decay=weight_decay, amsgrad=amsgrad, ema_decay=ema_decay,
+                        ema_power=ema_power, param_names=param_names)
+        super().__init__(params,
defaults) + + def __setstate__(self, state): + super().__setstate__(state) + for group in self.param_groups: + group.setdefault('amsgrad', False) + + @torch.no_grad() + def step(self, closure=None): + """Performs a single optimization step. + Args: + closure (callable, optional): A closure that reevaluates the model + and returns the loss. + """ + loss = None + if closure is not None: + with torch.enable_grad(): + loss = closure() + + for group in self.param_groups: + params_with_grad = [] + grads = [] + exp_avgs = [] + exp_avg_sqs = [] + ema_params_with_grad = [] + state_sums = [] + max_exp_avg_sqs = [] + state_steps = [] + amsgrad = group['amsgrad'] + beta1, beta2 = group['betas'] + ema_decay = group['ema_decay'] + ema_power = group['ema_power'] + + for p in group['params']: + if p.grad is None: + continue + params_with_grad.append(p) + if p.grad.is_sparse: + raise RuntimeError('AdamW does not support sparse gradients') + grads.append(p.grad) + + state = self.state[p] + + # State initialization + if len(state) == 0: + state['step'] = 0 + # Exponential moving average of gradient values + state['exp_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format) + # Exponential moving average of squared gradient values + state['exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format) + if amsgrad: + # Maintains max of all exp. moving avg. of sq. grad. values + state['max_exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format) + # Exponential moving average of parameter values + state['param_exp_avg'] = p.detach().float().clone() + + exp_avgs.append(state['exp_avg']) + exp_avg_sqs.append(state['exp_avg_sq']) + ema_params_with_grad.append(state['param_exp_avg']) + + if amsgrad: + max_exp_avg_sqs.append(state['max_exp_avg_sq']) + + # update the steps for each param group update + state['step'] += 1 + # record the step after step update + state_steps.append(state['step']) + + optim._functional.adamw(params_with_grad, + grads, + exp_avgs, + exp_avg_sqs, + max_exp_avg_sqs, + state_steps, + amsgrad=amsgrad, + beta1=beta1, + beta2=beta2, + lr=group['lr'], + weight_decay=group['weight_decay'], + eps=group['eps'], + maximize=False) + + cur_ema_decay = min(ema_decay, 1 - state['step'] ** -ema_power) + for param, ema_param in zip(params_with_grad, ema_params_with_grad): + ema_param.mul_(cur_ema_decay).add_(param.float(), alpha=1 - cur_ema_decay) + + return loss \ No newline at end of file diff --git a/ldm_patched/licenses-3rd/chainer b/ldm_patched/licenses-3rd/chainer new file mode 100644 index 0000000000000000000000000000000000000000..db8ef9d966d9b2d89f1b060912806b45797d8c14 --- /dev/null +++ b/ldm_patched/licenses-3rd/chainer @@ -0,0 +1,20 @@ +Copyright (c) 2015 Preferred Infrastructure, Inc. +Copyright (c) 2015 Preferred Networks, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/ldm_patched/licenses-3rd/comfyui b/ldm_patched/licenses-3rd/comfyui new file mode 100644 index 0000000000000000000000000000000000000000..e72bfddabc15be5718a7cc061ac10e47741d8219 --- /dev/null +++ b/ldm_patched/licenses-3rd/comfyui @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. 
The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. 
+ + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. 
+ + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/philosophy/why-not-lgpl.html>.
\ No newline at end of file
diff --git a/ldm_patched/licenses-3rd/diffusers b/ldm_patched/licenses-3rd/diffusers
new file mode 100644
index 0000000000000000000000000000000000000000..f49a4e16e68b128803cc2dcea614603632b04eac
--- /dev/null
+++ b/ldm_patched/licenses-3rd/diffusers
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity.
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/ldm_patched/licenses-3rd/kdiffusion b/ldm_patched/licenses-3rd/kdiffusion new file mode 100644 index 0000000000000000000000000000000000000000..e20684e521b3f0ce86ed26c2fc95ed665f52e05f --- /dev/null +++ b/ldm_patched/licenses-3rd/kdiffusion @@ -0,0 +1,19 @@ +Copyright (c) 2022 Katherine Crowson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/ldm_patched/licenses-3rd/ldm b/ldm_patched/licenses-3rd/ldm new file mode 100644 index 0000000000000000000000000000000000000000..1a1c5058de8a8817b786ad440e9984c99e7df0ca --- /dev/null +++ b/ldm_patched/licenses-3rd/ldm @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Machine Vision and Learning Group, LMU Munich + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/ldm_patched/licenses-3rd/taesd b/ldm_patched/licenses-3rd/taesd new file mode 100644 index 0000000000000000000000000000000000000000..62e6312e5e8ca8669aa15c250f1d40c001cd7f20 --- /dev/null +++ b/ldm_patched/licenses-3rd/taesd @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 Ollin Boer Bohan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/ldm_patched/licenses-3rd/transformers b/ldm_patched/licenses-3rd/transformers new file mode 100644 index 0000000000000000000000000000000000000000..e44d8f5b79a0643c99977835611e1da9d08fc3cf --- /dev/null +++ b/ldm_patched/licenses-3rd/transformers @@ -0,0 +1,203 @@ +Copyright 2018- The Hugging Face team. All rights reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/ldm_patched/modules/__pycache__/args_parser.cpython-310.pyc b/ldm_patched/modules/__pycache__/args_parser.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9694d5ff8500e453147e3ee77bbb87721a3c366f Binary files /dev/null and b/ldm_patched/modules/__pycache__/args_parser.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/args_parser.cpython-312.pyc b/ldm_patched/modules/__pycache__/args_parser.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7af14558d237223a95e96b1ae1706ae978b0d97a Binary files /dev/null and b/ldm_patched/modules/__pycache__/args_parser.cpython-312.pyc differ diff --git a/ldm_patched/modules/__pycache__/checkpoint_pickle.cpython-310.pyc b/ldm_patched/modules/__pycache__/checkpoint_pickle.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..832a962a1d0b2059f8f4304bb5dc66a3521edfe7 Binary files /dev/null and b/ldm_patched/modules/__pycache__/checkpoint_pickle.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/clip_model.cpython-310.pyc b/ldm_patched/modules/__pycache__/clip_model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b79ceec15a8bbdf98950857be1680b1451c9cbb2 Binary files /dev/null and b/ldm_patched/modules/__pycache__/clip_model.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/clip_vision.cpython-310.pyc b/ldm_patched/modules/__pycache__/clip_vision.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..50c256948bd23ad658e9d9ad7abfae351163497d Binary files /dev/null and b/ldm_patched/modules/__pycache__/clip_vision.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/conds.cpython-310.pyc b/ldm_patched/modules/__pycache__/conds.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..87dd1d5107c93897be74678080fcafd4992e7b4b Binary files /dev/null and b/ldm_patched/modules/__pycache__/conds.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/controlnet.cpython-310.pyc b/ldm_patched/modules/__pycache__/controlnet.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2b501cb4af6fff5fad6f1b18b277e756b18b495d Binary files /dev/null and b/ldm_patched/modules/__pycache__/controlnet.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/diffusers_convert.cpython-310.pyc b/ldm_patched/modules/__pycache__/diffusers_convert.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..33de49e9880bd81336dbbda93d3c006f7ce46a22 Binary files /dev/null and b/ldm_patched/modules/__pycache__/diffusers_convert.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/diffusers_load.cpython-310.pyc b/ldm_patched/modules/__pycache__/diffusers_load.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..be44eb7e230067557a4bc55dddc4754e81aa74c0 Binary files /dev/null and b/ldm_patched/modules/__pycache__/diffusers_load.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/gligen.cpython-310.pyc b/ldm_patched/modules/__pycache__/gligen.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1601c4a884fb3a501a65ce78f2c9b6e0bdf52efb Binary files /dev/null and b/ldm_patched/modules/__pycache__/gligen.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/latent_formats.cpython-310.pyc 
b/ldm_patched/modules/__pycache__/latent_formats.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..227672a005c77f0803375b9c75ead1338dd606e8 Binary files /dev/null and b/ldm_patched/modules/__pycache__/latent_formats.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/lora.cpython-310.pyc b/ldm_patched/modules/__pycache__/lora.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b44951996ff188efd484b58363ae94cad822a86b Binary files /dev/null and b/ldm_patched/modules/__pycache__/lora.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/model_base.cpython-310.pyc b/ldm_patched/modules/__pycache__/model_base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6e3f837685a0924c4ed5d9348f489491b2a2b55e Binary files /dev/null and b/ldm_patched/modules/__pycache__/model_base.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/model_detection.cpython-310.pyc b/ldm_patched/modules/__pycache__/model_detection.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d7bc4549fd52e43837215e2487379eefd4ce8657 Binary files /dev/null and b/ldm_patched/modules/__pycache__/model_detection.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/model_management.cpython-310.pyc b/ldm_patched/modules/__pycache__/model_management.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1a4c291eacde883d727031cce4984435cadd8e6f Binary files /dev/null and b/ldm_patched/modules/__pycache__/model_management.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/model_patcher.cpython-310.pyc b/ldm_patched/modules/__pycache__/model_patcher.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e2f2864071ac5aeaf187ea1a27120c05b27acc9b Binary files /dev/null and b/ldm_patched/modules/__pycache__/model_patcher.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/model_sampling.cpython-310.pyc b/ldm_patched/modules/__pycache__/model_sampling.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fef1382d08672b4e522d053c8f5a9859ce9fcde9 Binary files /dev/null and b/ldm_patched/modules/__pycache__/model_sampling.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/ops.cpython-310.pyc b/ldm_patched/modules/__pycache__/ops.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..622f356e758b4c2d4716ad6a2e7df16d8b5412bd Binary files /dev/null and b/ldm_patched/modules/__pycache__/ops.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/options.cpython-310.pyc b/ldm_patched/modules/__pycache__/options.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bb98468d70be82e3d0d7e9737c1f6b00cb23b250 Binary files /dev/null and b/ldm_patched/modules/__pycache__/options.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/options.cpython-312.pyc b/ldm_patched/modules/__pycache__/options.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a4bd149fb850c6cdb2248889b30a4e4f3b308239 Binary files /dev/null and b/ldm_patched/modules/__pycache__/options.cpython-312.pyc differ diff --git a/ldm_patched/modules/__pycache__/sample.cpython-310.pyc b/ldm_patched/modules/__pycache__/sample.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fdd40d094dbbb2ecb1c212242d0eb5133493fcf4 Binary 
files /dev/null and b/ldm_patched/modules/__pycache__/sample.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/samplers.cpython-310.pyc b/ldm_patched/modules/__pycache__/samplers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fe62806554892cd09fdb6e5e74c36ca941c81431 Binary files /dev/null and b/ldm_patched/modules/__pycache__/samplers.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/sd.cpython-310.pyc b/ldm_patched/modules/__pycache__/sd.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..03208737340fd886caeb7b280eccf0372877ed4b Binary files /dev/null and b/ldm_patched/modules/__pycache__/sd.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/sd1_clip.cpython-310.pyc b/ldm_patched/modules/__pycache__/sd1_clip.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8786064510c44ccb96a75767c20ebb416d4019b5 Binary files /dev/null and b/ldm_patched/modules/__pycache__/sd1_clip.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/sd2_clip.cpython-310.pyc b/ldm_patched/modules/__pycache__/sd2_clip.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..88ac6d1add66afbd76e2a70d62eeacb9890f1d7e Binary files /dev/null and b/ldm_patched/modules/__pycache__/sd2_clip.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/sdxl_clip.cpython-310.pyc b/ldm_patched/modules/__pycache__/sdxl_clip.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8bbb9ba3c8dbd71732612a1027db679c844dfd72 Binary files /dev/null and b/ldm_patched/modules/__pycache__/sdxl_clip.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/supported_models.cpython-310.pyc b/ldm_patched/modules/__pycache__/supported_models.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f8bd4705255d485041891977b787f37b24daaa72 Binary files /dev/null and b/ldm_patched/modules/__pycache__/supported_models.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/supported_models_base.cpython-310.pyc b/ldm_patched/modules/__pycache__/supported_models_base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..606b448b8d44c252f860a7366edaf96d2a0eed32 Binary files /dev/null and b/ldm_patched/modules/__pycache__/supported_models_base.cpython-310.pyc differ diff --git a/ldm_patched/modules/__pycache__/utils.cpython-310.pyc b/ldm_patched/modules/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c41f74cc68534fd7233e9bb8f87aa2b092b1ffaf Binary files /dev/null and b/ldm_patched/modules/__pycache__/utils.cpython-310.pyc differ diff --git a/ldm_patched/modules/args_parser.py b/ldm_patched/modules/args_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..0c6165a7b9a89cede4ba450154f4f902a998017b --- /dev/null +++ b/ldm_patched/modules/args_parser.py @@ -0,0 +1,125 @@ +import argparse +import enum +import ldm_patched.modules.options + +class EnumAction(argparse.Action): + """ + Argparse action for handling Enums + """ + def __init__(self, **kwargs): + # Pop off the type value + enum_type = kwargs.pop("type", None) + + # Ensure an Enum subclass is provided + if enum_type is None: + raise ValueError("type must be assigned an Enum when using EnumAction") + if not issubclass(enum_type, enum.Enum): + raise TypeError("type must be an Enum when using EnumAction") + + # 
Generate choices from the Enum + choices = tuple(e.value for e in enum_type) + kwargs.setdefault("choices", choices) + kwargs.setdefault("metavar", f"[{','.join(list(choices))}]") + + super(EnumAction, self).__init__(**kwargs) + + self._enum = enum_type + + def __call__(self, parser, namespace, values, option_string=None): + # Convert value back into an Enum + value = self._enum(values) + setattr(namespace, self.dest, value) + + +parser = argparse.ArgumentParser() + +parser.add_argument("--listen", type=str, default="127.0.0.1", metavar="IP", nargs="?", const="0.0.0.0") +parser.add_argument("--port", type=int, default=8188) +parser.add_argument("--disable-header-check", type=str, default=None, metavar="ORIGIN", nargs="?", const="*") +parser.add_argument("--web-upload-size", type=float, default=100) + +parser.add_argument("--external-working-path", type=str, default=None, metavar="PATH", nargs='+', action='append') +parser.add_argument("--output-path", type=str, default=None) +parser.add_argument("--temp-path", type=str, default=None) +parser.add_argument("--cache-path", type=str, default=None) +parser.add_argument("--in-browser", action="store_true") +parser.add_argument("--disable-in-browser", action="store_true") +parser.add_argument("--gpu-device-id", type=int, default=None, metavar="DEVICE_ID") +cm_group = parser.add_mutually_exclusive_group() +cm_group.add_argument("--async-cuda-allocation", action="store_true") +cm_group.add_argument("--disable-async-cuda-allocation", action="store_true") + +parser.add_argument("--disable-attention-upcast", action="store_true") + +fp_group = parser.add_mutually_exclusive_group() +fp_group.add_argument("--all-in-fp32", action="store_true") +fp_group.add_argument("--all-in-fp16", action="store_true") + +fpunet_group = parser.add_mutually_exclusive_group() +fpunet_group.add_argument("--unet-in-bf16", action="store_true") +fpunet_group.add_argument("--unet-in-fp16", action="store_true") +fpunet_group.add_argument("--unet-in-fp8-e4m3fn", action="store_true") +fpunet_group.add_argument("--unet-in-fp8-e5m2", action="store_true") + +fpvae_group = parser.add_mutually_exclusive_group() +fpvae_group.add_argument("--vae-in-fp16", action="store_true") +fpvae_group.add_argument("--vae-in-fp32", action="store_true") +fpvae_group.add_argument("--vae-in-bf16", action="store_true") + +parser.add_argument("--vae-in-cpu", action="store_true") + +fpte_group = parser.add_mutually_exclusive_group() +fpte_group.add_argument("--clip-in-fp8-e4m3fn", action="store_true") +fpte_group.add_argument("--clip-in-fp8-e5m2", action="store_true") +fpte_group.add_argument("--clip-in-fp16", action="store_true") +fpte_group.add_argument("--clip-in-fp32", action="store_true") + + +parser.add_argument("--directml", type=int, nargs="?", metavar="DIRECTML_DEVICE", const=-1) + +parser.add_argument("--disable-ipex-hijack", action="store_true") + +class LatentPreviewMethod(enum.Enum): + NoPreviews = "none" + Auto = "auto" + Latent2RGB = "fast" + TAESD = "taesd" + +parser.add_argument("--preview-option", type=LatentPreviewMethod, default=LatentPreviewMethod.NoPreviews, action=EnumAction) + +attn_group = parser.add_mutually_exclusive_group() +attn_group.add_argument("--attention-split", action="store_true") +attn_group.add_argument("--attention-quad", action="store_true") +attn_group.add_argument("--attention-pytorch", action="store_true") + +parser.add_argument("--disable-xformers", action="store_true") + +vram_group = parser.add_mutually_exclusive_group() +vram_group.add_argument("--always-gpu", 
action="store_true")
+vram_group.add_argument("--always-high-vram", action="store_true")
+vram_group.add_argument("--always-normal-vram", action="store_true")
+vram_group.add_argument("--always-low-vram", action="store_true")
+vram_group.add_argument("--always-no-vram", action="store_true")
+vram_group.add_argument("--always-cpu", type=int, nargs="?", metavar="CPU_NUM_THREADS", const=-1)
+
+parser.add_argument("--always-offload-from-vram", action="store_true")
+parser.add_argument("--pytorch-deterministic", action="store_true")
+
+parser.add_argument("--disable-server-log", action="store_true")
+parser.add_argument("--debug-mode", action="store_true")
+parser.add_argument("--is-windows-embedded-python", action="store_true")
+
+parser.add_argument("--disable-server-info", action="store_true")
+
+parser.add_argument("--multi-user", action="store_true")
+
+if ldm_patched.modules.options.args_parsing:
+    args = parser.parse_args()  # args parsing enabled: read the real command line
+else:
+    args = parser.parse_args([])  # args parsing disabled: use defaults only
+
+if args.is_windows_embedded_python:
+    args.in_browser = True
+
+if args.disable_in_browser:
+    args.in_browser = False
diff --git a/ldm_patched/modules/checkpoint_pickle.py b/ldm_patched/modules/checkpoint_pickle.py
new file mode 100644
index 0000000000000000000000000000000000000000..206551d3c1cf0d654c907534629a800196ba138b
--- /dev/null
+++ b/ldm_patched/modules/checkpoint_pickle.py
@@ -0,0 +1,13 @@
+import pickle
+
+load = pickle.load
+
+class Empty:
+    pass
+
+class Unpickler(pickle.Unpickler):
+    def find_class(self, module, name):
+        #TODO: safe unpickle
+        if module.startswith("pytorch_lightning"):
+            return Empty  # stub out Lightning classes so legacy checkpoints load without the dependency
+        return super().find_class(module, name)
diff --git a/ldm_patched/modules/clip_config_bigg.json b/ldm_patched/modules/clip_config_bigg.json
new file mode 100644
index 0000000000000000000000000000000000000000..32d82ff39ba66ba0be15ec101993e1c46cc3f7ab
--- /dev/null
+++ b/ldm_patched/modules/clip_config_bigg.json
@@ -0,0 +1,23 @@
+{
+  "architectures": [
+    "CLIPTextModel"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "dropout": 0.0,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_size": 1280,
+  "initializer_factor": 1.0,
+  "initializer_range": 0.02,
+  "intermediate_size": 5120,
+  "layer_norm_eps": 1e-05,
+  "max_position_embeddings": 77,
+  "model_type": "clip_text_model",
+  "num_attention_heads": 20,
+  "num_hidden_layers": 32,
+  "pad_token_id": 1,
+  "projection_dim": 1280,
+  "torch_dtype": "float32",
+  "vocab_size": 49408
+}
diff --git a/ldm_patched/modules/clip_model.py b/ldm_patched/modules/clip_model.py
new file mode 100644
index 0000000000000000000000000000000000000000..aceca86d641b6fcf116b1b57da232cc7336c566d
--- /dev/null
+++ b/ldm_patched/modules/clip_model.py
@@ -0,0 +1,188 @@
+import torch
+from ldm_patched.ldm.modules.attention import optimized_attention_for_device
+
+class CLIPAttention(torch.nn.Module):
+    def __init__(self, embed_dim, heads, dtype, device, operations):
+        super().__init__()
+
+        self.heads = heads
+        self.q_proj = operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device)
+        self.k_proj = operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device)
+        self.v_proj = operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device)
+
+        self.out_proj = operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device)
+
+    def forward(self, x, mask=None, optimized_attention=None):
+        q = self.q_proj(x)
+        k = self.k_proj(x)
+        v = self.v_proj(x)
+
+        out = optimized_attention(q, k, v, self.heads, mask)
+        return
self.out_proj(out) + +ACTIVATIONS = {"quick_gelu": lambda a: a * torch.sigmoid(1.702 * a), + "gelu": torch.nn.functional.gelu, +} + +class CLIPMLP(torch.nn.Module): + def __init__(self, embed_dim, intermediate_size, activation, dtype, device, operations): + super().__init__() + self.fc1 = operations.Linear(embed_dim, intermediate_size, bias=True, dtype=dtype, device=device) + self.activation = ACTIVATIONS[activation] + self.fc2 = operations.Linear(intermediate_size, embed_dim, bias=True, dtype=dtype, device=device) + + def forward(self, x): + x = self.fc1(x) + x = self.activation(x) + x = self.fc2(x) + return x + +class CLIPLayer(torch.nn.Module): + def __init__(self, embed_dim, heads, intermediate_size, intermediate_activation, dtype, device, operations): + super().__init__() + self.layer_norm1 = operations.LayerNorm(embed_dim, dtype=dtype, device=device) + self.self_attn = CLIPAttention(embed_dim, heads, dtype, device, operations) + self.layer_norm2 = operations.LayerNorm(embed_dim, dtype=dtype, device=device) + self.mlp = CLIPMLP(embed_dim, intermediate_size, intermediate_activation, dtype, device, operations) + + def forward(self, x, mask=None, optimized_attention=None): + x += self.self_attn(self.layer_norm1(x), mask, optimized_attention) + x += self.mlp(self.layer_norm2(x)) + return x + + +class CLIPEncoder(torch.nn.Module): + def __init__(self, num_layers, embed_dim, heads, intermediate_size, intermediate_activation, dtype, device, operations): + super().__init__() + self.layers = torch.nn.ModuleList([CLIPLayer(embed_dim, heads, intermediate_size, intermediate_activation, dtype, device, operations) for i in range(num_layers)]) + + def forward(self, x, mask=None, intermediate_output=None): + optimized_attention = optimized_attention_for_device(x.device, mask=mask is not None, small_input=True) + + if intermediate_output is not None: + if intermediate_output < 0: + intermediate_output = len(self.layers) + intermediate_output + + intermediate = None + for i, l in enumerate(self.layers): + x = l(x, mask, optimized_attention) + if i == intermediate_output: + intermediate = x.clone() + return x, intermediate + +class CLIPEmbeddings(torch.nn.Module): + def __init__(self, embed_dim, vocab_size=49408, num_positions=77, dtype=None, device=None): + super().__init__() + self.token_embedding = torch.nn.Embedding(vocab_size, embed_dim, dtype=dtype, device=device) + self.position_embedding = torch.nn.Embedding(num_positions, embed_dim, dtype=dtype, device=device) + + def forward(self, input_tokens): + return self.token_embedding(input_tokens) + self.position_embedding.weight + + +class CLIPTextModel_(torch.nn.Module): + def __init__(self, config_dict, dtype, device, operations): + num_layers = config_dict["num_hidden_layers"] + embed_dim = config_dict["hidden_size"] + heads = config_dict["num_attention_heads"] + intermediate_size = config_dict["intermediate_size"] + intermediate_activation = config_dict["hidden_act"] + + super().__init__() + self.embeddings = CLIPEmbeddings(embed_dim, dtype=torch.float32, device=device) + self.encoder = CLIPEncoder(num_layers, embed_dim, heads, intermediate_size, intermediate_activation, dtype, device, operations) + self.final_layer_norm = operations.LayerNorm(embed_dim, dtype=dtype, device=device) + + def forward(self, input_tokens, attention_mask=None, intermediate_output=None, final_layer_norm_intermediate=True): + x = self.embeddings(input_tokens) + mask = None + if attention_mask is not None: + mask = 1.0 - 
attention_mask.to(x.dtype).unsqueeze(1).unsqueeze(1).expand(attention_mask.shape[0], 1, attention_mask.shape[-1], attention_mask.shape[-1]) + mask = mask.masked_fill(mask.to(torch.bool), float("-inf")) + + causal_mask = torch.empty(x.shape[1], x.shape[1], dtype=x.dtype, device=x.device).fill_(float("-inf")).triu_(1) + if mask is not None: + mask += causal_mask + else: + mask = causal_mask + + x, i = self.encoder(x, mask=mask, intermediate_output=intermediate_output) + x = self.final_layer_norm(x) + if i is not None and final_layer_norm_intermediate: + i = self.final_layer_norm(i) + + pooled_output = x[torch.arange(x.shape[0], device=x.device), input_tokens.to(dtype=torch.int, device=x.device).argmax(dim=-1),] + return x, i, pooled_output + +class CLIPTextModel(torch.nn.Module): + def __init__(self, config_dict, dtype, device, operations): + super().__init__() + self.num_layers = config_dict["num_hidden_layers"] + self.text_model = CLIPTextModel_(config_dict, dtype, device, operations) + self.dtype = dtype + + def get_input_embeddings(self): + return self.text_model.embeddings.token_embedding + + def set_input_embeddings(self, embeddings): + self.text_model.embeddings.token_embedding = embeddings + + def forward(self, *args, **kwargs): + return self.text_model(*args, **kwargs) + +class CLIPVisionEmbeddings(torch.nn.Module): + def __init__(self, embed_dim, num_channels=3, patch_size=14, image_size=224, dtype=None, device=None, operations=None): + super().__init__() + self.class_embedding = torch.nn.Parameter(torch.empty(embed_dim, dtype=dtype, device=device)) + + self.patch_embedding = operations.Conv2d( + in_channels=num_channels, + out_channels=embed_dim, + kernel_size=patch_size, + stride=patch_size, + bias=False, + dtype=dtype, + device=device + ) + + num_patches = (image_size // patch_size) ** 2 + num_positions = num_patches + 1 + self.position_embedding = torch.nn.Embedding(num_positions, embed_dim, dtype=dtype, device=device) + + def forward(self, pixel_values): + embeds = self.patch_embedding(pixel_values).flatten(2).transpose(1, 2) + return torch.cat([self.class_embedding.to(embeds.device).expand(pixel_values.shape[0], 1, -1), embeds], dim=1) + self.position_embedding.weight.to(embeds.device) + + +class CLIPVision(torch.nn.Module): + def __init__(self, config_dict, dtype, device, operations): + super().__init__() + num_layers = config_dict["num_hidden_layers"] + embed_dim = config_dict["hidden_size"] + heads = config_dict["num_attention_heads"] + intermediate_size = config_dict["intermediate_size"] + intermediate_activation = config_dict["hidden_act"] + + self.embeddings = CLIPVisionEmbeddings(embed_dim, config_dict["num_channels"], config_dict["patch_size"], config_dict["image_size"], dtype=torch.float32, device=device, operations=operations) + self.pre_layrnorm = operations.LayerNorm(embed_dim) + self.encoder = CLIPEncoder(num_layers, embed_dim, heads, intermediate_size, intermediate_activation, dtype, device, operations) + self.post_layernorm = operations.LayerNorm(embed_dim) + + def forward(self, pixel_values, attention_mask=None, intermediate_output=None): + x = self.embeddings(pixel_values) + x = self.pre_layrnorm(x) + #TODO: attention_mask? 
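+        # the encoder returns the final hidden states plus the requested
+        # intermediate layer; the pooled output below is the CLS token
+        # passed through post_layernorm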
+ x, i = self.encoder(x, mask=None, intermediate_output=intermediate_output) + pooled_output = self.post_layernorm(x[:, 0, :]) + return x, i, pooled_output + +class CLIPVisionModelProjection(torch.nn.Module): + def __init__(self, config_dict, dtype, device, operations): + super().__init__() + self.vision_model = CLIPVision(config_dict, dtype, device, operations) + self.visual_projection = operations.Linear(config_dict["hidden_size"], config_dict["projection_dim"], bias=False) + + def forward(self, *args, **kwargs): + x = self.vision_model(*args, **kwargs) + out = self.visual_projection(x[2]) + return (x[0], x[1], out) diff --git a/ldm_patched/modules/clip_vision.py b/ldm_patched/modules/clip_vision.py new file mode 100644 index 0000000000000000000000000000000000000000..affdb8b24e35eb64aad2f96de36a59c6d0d67b5d --- /dev/null +++ b/ldm_patched/modules/clip_vision.py @@ -0,0 +1,116 @@ +from .utils import load_torch_file, transformers_convert, state_dict_prefix_replace +import os +import torch +import json + +import ldm_patched.modules.ops +import ldm_patched.modules.model_patcher +import ldm_patched.modules.model_management +import ldm_patched.modules.utils +import ldm_patched.modules.clip_model + +class Output: + def __getitem__(self, key): + return getattr(self, key) + def __setitem__(self, key, item): + setattr(self, key, item) + +def clip_preprocess(image, size=224): + mean = torch.tensor([ 0.48145466,0.4578275,0.40821073], device=image.device, dtype=image.dtype) + std = torch.tensor([0.26862954,0.26130258,0.27577711], device=image.device, dtype=image.dtype) + image = image.movedim(-1, 1) + if not (image.shape[2] == size and image.shape[3] == size): + scale = (size / min(image.shape[2], image.shape[3])) + image = torch.nn.functional.interpolate(image, size=(round(scale * image.shape[2]), round(scale * image.shape[3])), mode="bicubic", antialias=True) + h = (image.shape[2] - size)//2 + w = (image.shape[3] - size)//2 + image = image[:,:,h:h+size,w:w+size] + image = torch.clip((255. 
* image), 0, 255).round() / 255.0 + return (image - mean.view([3,1,1])) / std.view([3,1,1]) + +class ClipVisionModel(): + def __init__(self, json_config): + with open(json_config) as f: + config = json.load(f) + + self.load_device = ldm_patched.modules.model_management.text_encoder_device() + offload_device = ldm_patched.modules.model_management.text_encoder_offload_device() + self.dtype = ldm_patched.modules.model_management.text_encoder_dtype(self.load_device) + self.model = ldm_patched.modules.clip_model.CLIPVisionModelProjection(config, self.dtype, offload_device, ldm_patched.modules.ops.manual_cast) + self.model.eval() + + self.patcher = ldm_patched.modules.model_patcher.ModelPatcher(self.model, load_device=self.load_device, offload_device=offload_device) + + def load_sd(self, sd): + return self.model.load_state_dict(sd, strict=False) + + def get_sd(self): + return self.model.state_dict() + + def encode_image(self, image): + ldm_patched.modules.model_management.load_model_gpu(self.patcher) + pixel_values = clip_preprocess(image.to(self.load_device)).float() + out = self.model(pixel_values=pixel_values, intermediate_output=-2) + + outputs = Output() + outputs["last_hidden_state"] = out[0].to(ldm_patched.modules.model_management.intermediate_device()) + outputs["image_embeds"] = out[2].to(ldm_patched.modules.model_management.intermediate_device()) + outputs["penultimate_hidden_states"] = out[1].to(ldm_patched.modules.model_management.intermediate_device()) + return outputs + +def convert_to_transformers(sd, prefix): + sd_k = sd.keys() + if "{}transformer.resblocks.0.attn.in_proj_weight".format(prefix) in sd_k: + keys_to_replace = { + "{}class_embedding".format(prefix): "vision_model.embeddings.class_embedding", + "{}conv1.weight".format(prefix): "vision_model.embeddings.patch_embedding.weight", + "{}positional_embedding".format(prefix): "vision_model.embeddings.position_embedding.weight", + "{}ln_post.bias".format(prefix): "vision_model.post_layernorm.bias", + "{}ln_post.weight".format(prefix): "vision_model.post_layernorm.weight", + "{}ln_pre.bias".format(prefix): "vision_model.pre_layrnorm.bias", + "{}ln_pre.weight".format(prefix): "vision_model.pre_layrnorm.weight", + } + + for x in keys_to_replace: + if x in sd_k: + sd[keys_to_replace[x]] = sd.pop(x) + + if "{}proj".format(prefix) in sd_k: + sd['visual_projection.weight'] = sd.pop("{}proj".format(prefix)).transpose(0, 1) + + sd = transformers_convert(sd, prefix, "vision_model.", 48) + else: + replace_prefix = {prefix: ""} + sd = state_dict_prefix_replace(sd, replace_prefix) + return sd + +def load_clipvision_from_sd(sd, prefix="", convert_keys=False): + if convert_keys: + sd = convert_to_transformers(sd, prefix) + if "vision_model.encoder.layers.47.layer_norm1.weight" in sd: + json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_vision_config_g.json") + elif "vision_model.encoder.layers.30.layer_norm1.weight" in sd: + json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_vision_config_h.json") + elif "vision_model.encoder.layers.22.layer_norm1.weight" in sd: + json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_vision_config_vitl.json") + else: + return None + + clip = ClipVisionModel(json_config) + m, u = clip.load_sd(sd) + if len(m) > 0: + print("extra clip vision:", m) + u = set(u) + keys = list(sd.keys()) + for k in keys: + if k not in u: + t = sd.pop(k) + del t + return clip + +def load(ckpt_path): + sd = load_torch_file(ckpt_path) + if 
"visual.transformer.resblocks.0.attn.in_proj_weight" in sd: + return load_clipvision_from_sd(sd, prefix="visual.", convert_keys=True) + else: + return load_clipvision_from_sd(sd) diff --git a/ldm_patched/modules/clip_vision_config_g.json b/ldm_patched/modules/clip_vision_config_g.json new file mode 100644 index 0000000000000000000000000000000000000000..708e7e21ac3513a719d6a49e88e756f5ef7e2c8d --- /dev/null +++ b/ldm_patched/modules/clip_vision_config_g.json @@ -0,0 +1,18 @@ +{ + "attention_dropout": 0.0, + "dropout": 0.0, + "hidden_act": "gelu", + "hidden_size": 1664, + "image_size": 224, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 8192, + "layer_norm_eps": 1e-05, + "model_type": "clip_vision_model", + "num_attention_heads": 16, + "num_channels": 3, + "num_hidden_layers": 48, + "patch_size": 14, + "projection_dim": 1280, + "torch_dtype": "float32" +} diff --git a/ldm_patched/modules/clip_vision_config_h.json b/ldm_patched/modules/clip_vision_config_h.json new file mode 100644 index 0000000000000000000000000000000000000000..bb71be419a4be0ad5c8c157850de032a65593cb9 --- /dev/null +++ b/ldm_patched/modules/clip_vision_config_h.json @@ -0,0 +1,18 @@ +{ + "attention_dropout": 0.0, + "dropout": 0.0, + "hidden_act": "gelu", + "hidden_size": 1280, + "image_size": 224, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 5120, + "layer_norm_eps": 1e-05, + "model_type": "clip_vision_model", + "num_attention_heads": 16, + "num_channels": 3, + "num_hidden_layers": 32, + "patch_size": 14, + "projection_dim": 1024, + "torch_dtype": "float32" +} diff --git a/ldm_patched/modules/clip_vision_config_vitl.json b/ldm_patched/modules/clip_vision_config_vitl.json new file mode 100644 index 0000000000000000000000000000000000000000..c59b8ed5a4c1f41fbcc9e6811d2c7dfe44273de7 --- /dev/null +++ b/ldm_patched/modules/clip_vision_config_vitl.json @@ -0,0 +1,18 @@ +{ + "attention_dropout": 0.0, + "dropout": 0.0, + "hidden_act": "quick_gelu", + "hidden_size": 1024, + "image_size": 224, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 4096, + "layer_norm_eps": 1e-05, + "model_type": "clip_vision_model", + "num_attention_heads": 16, + "num_channels": 3, + "num_hidden_layers": 24, + "patch_size": 14, + "projection_dim": 768, + "torch_dtype": "float32" +} diff --git a/ldm_patched/modules/conds.py b/ldm_patched/modules/conds.py new file mode 100644 index 0000000000000000000000000000000000000000..0ee184bc82d3f9988c5f9a2acea08ec5dee3e600 --- /dev/null +++ b/ldm_patched/modules/conds.py @@ -0,0 +1,76 @@ +import torch +import math +import ldm_patched.modules.utils + + + +class CONDRegular: + def __init__(self, cond): + self.cond = cond + + def _copy_with(self, cond): + return self.__class__(cond) + + def process_cond(self, batch_size, device, **kwargs): + return self._copy_with(ldm_patched.modules.utils.repeat_to_batch_size(self.cond, batch_size).to(device)) + + def can_concat(self, other): + if self.cond.shape != other.cond.shape: + return False + return True + + def concat(self, others): + conds = [self.cond] + for x in others: + conds.append(x.cond) + return torch.cat(conds) + +class CONDNoiseShape(CONDRegular): + def process_cond(self, batch_size, device, area, **kwargs): + data = self.cond[:,:,area[2]:area[0] + area[2],area[3]:area[1] + area[3]] + return self._copy_with(ldm_patched.modules.utils.repeat_to_batch_size(data, batch_size).to(device)) + + +class CONDCrossAttn(CONDRegular): + def can_concat(self, other): + s1 = 
self.cond.shape + s2 = other.cond.shape + if s1 != s2: + if s1[0] != s2[0] or s1[2] != s2[2]: #these 2 cases should not happen + return False + + mult_min = math.lcm(s1[1], s2[1]) + diff = mult_min // min(s1[1], s2[1]) + if diff > 4: #arbitrary limit on the padding because it's probably going to impact performance negatively if it's too much + return False + return True + + def concat(self, others): + conds = [self.cond] + crossattn_max_len = self.cond.shape[1] + for x in others: + c = x.cond + crossattn_max_len = math.lcm(crossattn_max_len, c.shape[1]) + conds.append(c) + + out = [] + for c in conds: + if c.shape[1] < crossattn_max_len: + c = c.repeat(1, crossattn_max_len // c.shape[1], 1) #padding with repeat doesn't change result + out.append(c) + return torch.cat(out) + +class CONDConstant(CONDRegular): + def __init__(self, cond): + self.cond = cond + + def process_cond(self, batch_size, device, **kwargs): + return self._copy_with(self.cond) + + def can_concat(self, other): + if self.cond != other.cond: + return False + return True + + def concat(self, others): + return self.cond diff --git a/ldm_patched/modules/controlnet.py b/ldm_patched/modules/controlnet.py new file mode 100644 index 0000000000000000000000000000000000000000..7e11497fe4982f2995056f2825e7273777e052e8 --- /dev/null +++ b/ldm_patched/modules/controlnet.py @@ -0,0 +1,516 @@ +import torch +import math +import os +import ldm_patched.modules.utils +import ldm_patched.modules.model_management +import ldm_patched.modules.model_detection +import ldm_patched.modules.model_patcher +import ldm_patched.modules.ops + +import ldm_patched.controlnet.cldm +import ldm_patched.t2ia.adapter + + +def broadcast_image_to(tensor, target_batch_size, batched_number): + current_batch_size = tensor.shape[0] + #print(current_batch_size, target_batch_size) + if current_batch_size == 1: + return tensor + + per_batch = target_batch_size // batched_number + tensor = tensor[:per_batch] + + if per_batch > tensor.shape[0]: + tensor = torch.cat([tensor] * (per_batch // tensor.shape[0]) + [tensor[:(per_batch % tensor.shape[0])]], dim=0) + + current_batch_size = tensor.shape[0] + if current_batch_size == target_batch_size: + return tensor + else: + return torch.cat([tensor] * batched_number, dim=0) + +class ControlBase: + def __init__(self, device=None): + self.cond_hint_original = None + self.cond_hint = None + self.strength = 1.0 + self.timestep_percent_range = (0.0, 1.0) + self.global_average_pooling = False + self.timestep_range = None + + if device is None: + device = ldm_patched.modules.model_management.get_torch_device() + self.device = device + self.previous_controlnet = None + + def set_cond_hint(self, cond_hint, strength=1.0, timestep_percent_range=(0.0, 1.0)): + self.cond_hint_original = cond_hint + self.strength = strength + self.timestep_percent_range = timestep_percent_range + return self + + def pre_run(self, model, percent_to_timestep_function): + self.timestep_range = (percent_to_timestep_function(self.timestep_percent_range[0]), percent_to_timestep_function(self.timestep_percent_range[1])) + if self.previous_controlnet is not None: + self.previous_controlnet.pre_run(model, percent_to_timestep_function) + + def set_previous_controlnet(self, controlnet): + self.previous_controlnet = controlnet + return self + + def cleanup(self): + if self.previous_controlnet is not None: + self.previous_controlnet.cleanup() + if self.cond_hint is not None: + del self.cond_hint + self.cond_hint = None + self.timestep_range = None + + def get_models(self): 
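+        # collect the patcher-wrapped models of this controlnet and of every
+        # chained previous controlnet so they can be loaded together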
+ out = [] + if self.previous_controlnet is not None: + out += self.previous_controlnet.get_models() + return out + + def copy_to(self, c): + c.cond_hint_original = self.cond_hint_original + c.strength = self.strength + c.timestep_percent_range = self.timestep_percent_range + c.global_average_pooling = self.global_average_pooling + + def inference_memory_requirements(self, dtype): + if self.previous_controlnet is not None: + return self.previous_controlnet.inference_memory_requirements(dtype) + return 0 + + def control_merge(self, control_input, control_output, control_prev, output_dtype): + out = {'input':[], 'middle':[], 'output': []} + + if control_input is not None: + for i in range(len(control_input)): + key = 'input' + x = control_input[i] + if x is not None: + x *= self.strength + if x.dtype != output_dtype: + x = x.to(output_dtype) + out[key].insert(0, x) + + if control_output is not None: + for i in range(len(control_output)): + if i == (len(control_output) - 1): + key = 'middle' + index = 0 + else: + key = 'output' + index = i + x = control_output[i] + if x is not None: + if self.global_average_pooling: + x = torch.mean(x, dim=(2, 3), keepdim=True).repeat(1, 1, x.shape[2], x.shape[3]) + + x *= self.strength + if x.dtype != output_dtype: + x = x.to(output_dtype) + + out[key].append(x) + if control_prev is not None: + for x in ['input', 'middle', 'output']: + o = out[x] + for i in range(len(control_prev[x])): + prev_val = control_prev[x][i] + if i >= len(o): + o.append(prev_val) + elif prev_val is not None: + if o[i] is None: + o[i] = prev_val + else: + if o[i].shape[0] < prev_val.shape[0]: + o[i] = prev_val + o[i] + else: + o[i] += prev_val + return out + +class ControlNet(ControlBase): + def __init__(self, control_model, global_average_pooling=False, device=None, load_device=None, manual_cast_dtype=None): + super().__init__(device) + self.control_model = control_model + self.load_device = load_device + self.control_model_wrapped = ldm_patched.modules.model_patcher.ModelPatcher(self.control_model, load_device=load_device, offload_device=ldm_patched.modules.model_management.unet_offload_device()) + self.global_average_pooling = global_average_pooling + self.model_sampling_current = None + self.manual_cast_dtype = manual_cast_dtype + + def get_control(self, x_noisy, t, cond, batched_number): + control_prev = None + if self.previous_controlnet is not None: + control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number) + + if self.timestep_range is not None: + if t[0] > self.timestep_range[0] or t[0] < self.timestep_range[1]: + if control_prev is not None: + return control_prev + else: + return None + + dtype = self.control_model.dtype + if self.manual_cast_dtype is not None: + dtype = self.manual_cast_dtype + + output_dtype = x_noisy.dtype + if self.cond_hint is None or x_noisy.shape[2] * 8 != self.cond_hint.shape[2] or x_noisy.shape[3] * 8 != self.cond_hint.shape[3]: + if self.cond_hint is not None: + del self.cond_hint + self.cond_hint = None + self.cond_hint = ldm_patched.modules.utils.common_upscale(self.cond_hint_original, x_noisy.shape[3] * 8, x_noisy.shape[2] * 8, 'nearest-exact', "center").to(dtype).to(self.device) + if x_noisy.shape[0] != self.cond_hint.shape[0]: + self.cond_hint = broadcast_image_to(self.cond_hint, x_noisy.shape[0], batched_number) + + context = cond['c_crossattn'] + y = cond.get('y', None) + if y is not None: + y = y.to(dtype) + timestep = self.model_sampling_current.timestep(t) + x_noisy = 
self.model_sampling_current.calculate_input(t, x_noisy) + + control = self.control_model(x=x_noisy.to(dtype), hint=self.cond_hint, timesteps=timestep.float(), context=context.to(dtype), y=y) + return self.control_merge(None, control, control_prev, output_dtype) + + def copy(self): + c = ControlNet(self.control_model, global_average_pooling=self.global_average_pooling, load_device=self.load_device, manual_cast_dtype=self.manual_cast_dtype) + self.copy_to(c) + return c + + def get_models(self): + out = super().get_models() + out.append(self.control_model_wrapped) + return out + + def pre_run(self, model, percent_to_timestep_function): + super().pre_run(model, percent_to_timestep_function) + self.model_sampling_current = model.model_sampling + + def cleanup(self): + self.model_sampling_current = None + super().cleanup() + +class ControlLoraOps: + class Linear(torch.nn.Module): + def __init__(self, in_features: int, out_features: int, bias: bool = True, + device=None, dtype=None) -> None: + factory_kwargs = {'device': device, 'dtype': dtype} + super().__init__() + self.in_features = in_features + self.out_features = out_features + self.weight = None + self.up = None + self.down = None + self.bias = None + + def forward(self, input): + weight, bias = ldm_patched.modules.ops.cast_bias_weight(self, input) + if self.up is not None: + return torch.nn.functional.linear(input, weight + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), bias) + else: + return torch.nn.functional.linear(input, weight, bias) + + class Conv2d(torch.nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + padding=0, + dilation=1, + groups=1, + bias=True, + padding_mode='zeros', + device=None, + dtype=None + ): + super().__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = kernel_size + self.stride = stride + self.padding = padding + self.dilation = dilation + self.transposed = False + self.output_padding = 0 + self.groups = groups + self.padding_mode = padding_mode + + self.weight = None + self.bias = None + self.up = None + self.down = None + + + def forward(self, input): + weight, bias = ldm_patched.modules.ops.cast_bias_weight(self, input) + if self.up is not None: + return torch.nn.functional.conv2d(input, weight + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), bias, self.stride, self.padding, self.dilation, self.groups) + else: + return torch.nn.functional.conv2d(input, weight, bias, self.stride, self.padding, self.dilation, self.groups) + + +class ControlLora(ControlNet): + def __init__(self, control_weights, global_average_pooling=False, device=None): + ControlBase.__init__(self, device) + self.control_weights = control_weights + self.global_average_pooling = global_average_pooling + + def pre_run(self, model, percent_to_timestep_function): + super().pre_run(model, percent_to_timestep_function) + controlnet_config = model.model_config.unet_config.copy() + controlnet_config.pop("out_channels") + controlnet_config["hint_channels"] = self.control_weights["input_hint_block.0.weight"].shape[1] + self.manual_cast_dtype = model.manual_cast_dtype + dtype = model.get_dtype() + if self.manual_cast_dtype is None: + class control_lora_ops(ControlLoraOps, ldm_patched.modules.ops.disable_weight_init): + pass + else: + class control_lora_ops(ControlLoraOps, ldm_patched.modules.ops.manual_cast): + pass + 
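+            # manual casting is active: build the control model with
+            # cast-on-the-fly ops and compute in the manual cast dtype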
dtype = self.manual_cast_dtype + + controlnet_config["operations"] = control_lora_ops + controlnet_config["dtype"] = dtype + self.control_model = ldm_patched.controlnet.cldm.ControlNet(**controlnet_config) + self.control_model.to(ldm_patched.modules.model_management.get_torch_device()) + diffusion_model = model.diffusion_model + sd = diffusion_model.state_dict() + cm = self.control_model.state_dict() + + for k in sd: + weight = sd[k] + try: + ldm_patched.modules.utils.set_attr(self.control_model, k, weight) + except: + pass + + for k in self.control_weights: + if k not in {"lora_controlnet"}: + ldm_patched.modules.utils.set_attr(self.control_model, k, self.control_weights[k].to(dtype).to(ldm_patched.modules.model_management.get_torch_device())) + + def copy(self): + c = ControlLora(self.control_weights, global_average_pooling=self.global_average_pooling) + self.copy_to(c) + return c + + def cleanup(self): + del self.control_model + self.control_model = None + super().cleanup() + + def get_models(self): + out = ControlBase.get_models(self) + return out + + def inference_memory_requirements(self, dtype): + return ldm_patched.modules.utils.calculate_parameters(self.control_weights) * ldm_patched.modules.model_management.dtype_size(dtype) + ControlBase.inference_memory_requirements(self, dtype) + +def load_controlnet(ckpt_path, model=None): + controlnet_data = ldm_patched.modules.utils.load_torch_file(ckpt_path, safe_load=True) + if "lora_controlnet" in controlnet_data: + return ControlLora(controlnet_data) + + controlnet_config = None + if "controlnet_cond_embedding.conv_in.weight" in controlnet_data: #diffusers format + unet_dtype = ldm_patched.modules.model_management.unet_dtype() + controlnet_config = ldm_patched.modules.model_detection.unet_config_from_diffusers_unet(controlnet_data, unet_dtype) + diffusers_keys = ldm_patched.modules.utils.unet_to_diffusers(controlnet_config) + diffusers_keys["controlnet_mid_block.weight"] = "middle_block_out.0.weight" + diffusers_keys["controlnet_mid_block.bias"] = "middle_block_out.0.bias" + + count = 0 + loop = True + while loop: + suffix = [".weight", ".bias"] + for s in suffix: + k_in = "controlnet_down_blocks.{}{}".format(count, s) + k_out = "zero_convs.{}.0{}".format(count, s) + if k_in not in controlnet_data: + loop = False + break + diffusers_keys[k_in] = k_out + count += 1 + + count = 0 + loop = True + while loop: + suffix = [".weight", ".bias"] + for s in suffix: + if count == 0: + k_in = "controlnet_cond_embedding.conv_in{}".format(s) + else: + k_in = "controlnet_cond_embedding.blocks.{}{}".format(count - 1, s) + k_out = "input_hint_block.{}{}".format(count * 2, s) + if k_in not in controlnet_data: + k_in = "controlnet_cond_embedding.conv_out{}".format(s) + loop = False + diffusers_keys[k_in] = k_out + count += 1 + + new_sd = {} + for k in diffusers_keys: + if k in controlnet_data: + new_sd[diffusers_keys[k]] = controlnet_data.pop(k) + + leftover_keys = controlnet_data.keys() + if len(leftover_keys) > 0: + print("leftover keys:", leftover_keys) + controlnet_data = new_sd + + pth_key = 'control_model.zero_convs.0.0.weight' + pth = False + key = 'zero_convs.0.0.weight' + if pth_key in controlnet_data: + pth = True + key = pth_key + prefix = "control_model." 
+ elif key in controlnet_data: + prefix = "" + else: + net = load_t2i_adapter(controlnet_data) + if net is None: + print("error checkpoint does not contain controlnet or t2i adapter data", ckpt_path) + return net + + if controlnet_config is None: + unet_dtype = ldm_patched.modules.model_management.unet_dtype() + controlnet_config = ldm_patched.modules.model_detection.model_config_from_unet(controlnet_data, prefix, unet_dtype, True).unet_config + load_device = ldm_patched.modules.model_management.get_torch_device() + manual_cast_dtype = ldm_patched.modules.model_management.unet_manual_cast(unet_dtype, load_device) + if manual_cast_dtype is not None: + controlnet_config["operations"] = ldm_patched.modules.ops.manual_cast + controlnet_config.pop("out_channels") + controlnet_config["hint_channels"] = controlnet_data["{}input_hint_block.0.weight".format(prefix)].shape[1] + control_model = ldm_patched.controlnet.cldm.ControlNet(**controlnet_config) + + if pth: + if 'difference' in controlnet_data: + if model is not None: + ldm_patched.modules.model_management.load_models_gpu([model]) + model_sd = model.model_state_dict() + for x in controlnet_data: + c_m = "control_model." + if x.startswith(c_m): + sd_key = "diffusion_model.{}".format(x[len(c_m):]) + if sd_key in model_sd: + cd = controlnet_data[x] + cd += model_sd[sd_key].type(cd.dtype).to(cd.device) + else: + print("WARNING: Loaded a diff controlnet without a model. It will very likely not work.") + + class WeightsLoader(torch.nn.Module): + pass + w = WeightsLoader() + w.control_model = control_model + missing, unexpected = w.load_state_dict(controlnet_data, strict=False) + else: + missing, unexpected = control_model.load_state_dict(controlnet_data, strict=False) + print(missing, unexpected) + + global_average_pooling = False + filename = os.path.splitext(ckpt_path)[0] + if filename.endswith("_shuffle") or filename.endswith("_shuffle_fp16"): #TODO: smarter way of enabling global_average_pooling + global_average_pooling = True + + control = ControlNet(control_model, global_average_pooling=global_average_pooling, load_device=load_device, manual_cast_dtype=manual_cast_dtype) + return control + +class T2IAdapter(ControlBase): + def __init__(self, t2i_model, channels_in, device=None): + super().__init__(device) + self.t2i_model = t2i_model + self.channels_in = channels_in + self.control_input = None + + def scale_image_to(self, width, height): + unshuffle_amount = self.t2i_model.unshuffle_amount + width = math.ceil(width / unshuffle_amount) * unshuffle_amount + height = math.ceil(height / unshuffle_amount) * unshuffle_amount + return width, height + + def get_control(self, x_noisy, t, cond, batched_number): + control_prev = None + if self.previous_controlnet is not None: + control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number) + + if self.timestep_range is not None: + if t[0] > self.timestep_range[0] or t[0] < self.timestep_range[1]: + if control_prev is not None: + return control_prev + else: + return None + + if self.cond_hint is None or x_noisy.shape[2] * 8 != self.cond_hint.shape[2] or x_noisy.shape[3] * 8 != self.cond_hint.shape[3]: + if self.cond_hint is not None: + del self.cond_hint + self.control_input = None + self.cond_hint = None + width, height = self.scale_image_to(x_noisy.shape[3] * 8, x_noisy.shape[2] * 8) + self.cond_hint = ldm_patched.modules.utils.common_upscale(self.cond_hint_original, width, height, 'nearest-exact', "center").float().to(self.device) + if self.channels_in == 1 and 
self.cond_hint.shape[1] > 1: + self.cond_hint = torch.mean(self.cond_hint, 1, keepdim=True) + if x_noisy.shape[0] != self.cond_hint.shape[0]: + self.cond_hint = broadcast_image_to(self.cond_hint, x_noisy.shape[0], batched_number) + if self.control_input is None: + self.t2i_model.to(x_noisy.dtype) + self.t2i_model.to(self.device) + self.control_input = self.t2i_model(self.cond_hint.to(x_noisy.dtype)) + self.t2i_model.cpu() + + control_input = list(map(lambda a: None if a is None else a.clone(), self.control_input)) + mid = None + if self.t2i_model.xl == True: + mid = control_input[-1:] + control_input = control_input[:-1] + return self.control_merge(control_input, mid, control_prev, x_noisy.dtype) + + def copy(self): + c = T2IAdapter(self.t2i_model, self.channels_in) + self.copy_to(c) + return c + +def load_t2i_adapter(t2i_data): + if 'adapter' in t2i_data: + t2i_data = t2i_data['adapter'] + if 'adapter.body.0.resnets.0.block1.weight' in t2i_data: #diffusers format + prefix_replace = {} + for i in range(4): + for j in range(2): + prefix_replace["adapter.body.{}.resnets.{}.".format(i, j)] = "body.{}.".format(i * 2 + j) + prefix_replace["adapter.body.{}.".format(i, j)] = "body.{}.".format(i * 2) + prefix_replace["adapter."] = "" + t2i_data = ldm_patched.modules.utils.state_dict_prefix_replace(t2i_data, prefix_replace) + keys = t2i_data.keys() + + if "body.0.in_conv.weight" in keys: + cin = t2i_data['body.0.in_conv.weight'].shape[1] + model_ad = ldm_patched.t2ia.adapter.Adapter_light(cin=cin, channels=[320, 640, 1280, 1280], nums_rb=4) + elif 'conv_in.weight' in keys: + cin = t2i_data['conv_in.weight'].shape[1] + channel = t2i_data['conv_in.weight'].shape[0] + ksize = t2i_data['body.0.block2.weight'].shape[2] + use_conv = False + down_opts = list(filter(lambda a: a.endswith("down_opt.op.weight"), keys)) + if len(down_opts) > 0: + use_conv = True + xl = False + if cin == 256 or cin == 768: + xl = True + model_ad = ldm_patched.t2ia.adapter.Adapter(cin=cin, channels=[channel, channel*2, channel*4, channel*4][:4], nums_rb=2, ksize=ksize, sk=True, use_conv=use_conv, xl=xl) + else: + return None + missing, unexpected = model_ad.load_state_dict(t2i_data) + if len(missing) > 0: + print("t2i missing", missing) + + if len(unexpected) > 0: + print("t2i unexpected", unexpected) + + return T2IAdapter(model_ad, model_ad.input_channels) diff --git a/ldm_patched/modules/diffusers_convert.py b/ldm_patched/modules/diffusers_convert.py new file mode 100644 index 0000000000000000000000000000000000000000..a9eb9302f14f4fa2710c9652e0b58c6453c0cf7b --- /dev/null +++ b/ldm_patched/modules/diffusers_convert.py @@ -0,0 +1,261 @@ +import re +import torch + +# conversion code from https://github.com/huggingface/diffusers/blob/main/scripts/convert_diffusers_to_original_stable_diffusion.py + +# =================# +# UNet Conversion # +# =================# + +unet_conversion_map = [ + # (stable-diffusion, HF Diffusers) + ("time_embed.0.weight", "time_embedding.linear_1.weight"), + ("time_embed.0.bias", "time_embedding.linear_1.bias"), + ("time_embed.2.weight", "time_embedding.linear_2.weight"), + ("time_embed.2.bias", "time_embedding.linear_2.bias"), + ("input_blocks.0.0.weight", "conv_in.weight"), + ("input_blocks.0.0.bias", "conv_in.bias"), + ("out.0.weight", "conv_norm_out.weight"), + ("out.0.bias", "conv_norm_out.bias"), + ("out.2.weight", "conv_out.weight"), + ("out.2.bias", "conv_out.bias"), +] + +unet_conversion_map_resnet = [ + # (stable-diffusion, HF Diffusers) + ("in_layers.0", "norm1"), + ("in_layers.2", "conv1"), 
+ ("out_layers.0", "norm2"), + ("out_layers.3", "conv2"), + ("emb_layers.1", "time_emb_proj"), + ("skip_connection", "conv_shortcut"), +] + +unet_conversion_map_layer = [] +# hardcoded number of downblocks and resnets/attentions... +# would need smarter logic for other networks. +for i in range(4): + # loop over downblocks/upblocks + + for j in range(2): + # loop over resnets/attentions for downblocks + hf_down_res_prefix = f"down_blocks.{i}.resnets.{j}." + sd_down_res_prefix = f"input_blocks.{3 * i + j + 1}.0." + unet_conversion_map_layer.append((sd_down_res_prefix, hf_down_res_prefix)) + + if i < 3: + # no attention layers in down_blocks.3 + hf_down_atn_prefix = f"down_blocks.{i}.attentions.{j}." + sd_down_atn_prefix = f"input_blocks.{3 * i + j + 1}.1." + unet_conversion_map_layer.append((sd_down_atn_prefix, hf_down_atn_prefix)) + + for j in range(3): + # loop over resnets/attentions for upblocks + hf_up_res_prefix = f"up_blocks.{i}.resnets.{j}." + sd_up_res_prefix = f"output_blocks.{3 * i + j}.0." + unet_conversion_map_layer.append((sd_up_res_prefix, hf_up_res_prefix)) + + if i > 0: + # no attention layers in up_blocks.0 + hf_up_atn_prefix = f"up_blocks.{i}.attentions.{j}." + sd_up_atn_prefix = f"output_blocks.{3 * i + j}.1." + unet_conversion_map_layer.append((sd_up_atn_prefix, hf_up_atn_prefix)) + + if i < 3: + # no downsample in down_blocks.3 + hf_downsample_prefix = f"down_blocks.{i}.downsamplers.0.conv." + sd_downsample_prefix = f"input_blocks.{3 * (i + 1)}.0.op." + unet_conversion_map_layer.append((sd_downsample_prefix, hf_downsample_prefix)) + + # no upsample in up_blocks.3 + hf_upsample_prefix = f"up_blocks.{i}.upsamplers.0." + sd_upsample_prefix = f"output_blocks.{3 * i + 2}.{1 if i == 0 else 2}." + unet_conversion_map_layer.append((sd_upsample_prefix, hf_upsample_prefix)) + +hf_mid_atn_prefix = "mid_block.attentions.0." +sd_mid_atn_prefix = "middle_block.1." +unet_conversion_map_layer.append((sd_mid_atn_prefix, hf_mid_atn_prefix)) + +for j in range(2): + hf_mid_res_prefix = f"mid_block.resnets.{j}." + sd_mid_res_prefix = f"middle_block.{2 * j}." + unet_conversion_map_layer.append((sd_mid_res_prefix, hf_mid_res_prefix)) + + +def convert_unet_state_dict(unet_state_dict): + # buyer beware: this is a *brittle* function, + # and correct output requires that all of these pieces interact in + # the exact order in which I have arranged them. + mapping = {k: k for k in unet_state_dict.keys()} + for sd_name, hf_name in unet_conversion_map: + mapping[hf_name] = sd_name + for k, v in mapping.items(): + if "resnets" in k: + for sd_part, hf_part in unet_conversion_map_resnet: + v = v.replace(hf_part, sd_part) + mapping[k] = v + for k, v in mapping.items(): + for sd_part, hf_part in unet_conversion_map_layer: + v = v.replace(hf_part, sd_part) + mapping[k] = v + new_state_dict = {v: unet_state_dict[k] for k, v in mapping.items()} + return new_state_dict + + +# ================# +# VAE Conversion # +# ================# + +vae_conversion_map = [ + # (stable-diffusion, HF Diffusers) + ("nin_shortcut", "conv_shortcut"), + ("norm_out", "conv_norm_out"), + ("mid.attn_1.", "mid_block.attentions.0."), +] + +for i in range(4): + # down_blocks have two resnets + for j in range(2): + hf_down_prefix = f"encoder.down_blocks.{i}.resnets.{j}." + sd_down_prefix = f"encoder.down.{i}.block.{j}." + vae_conversion_map.append((sd_down_prefix, hf_down_prefix)) + + if i < 3: + hf_downsample_prefix = f"down_blocks.{i}.downsamplers.0." + sd_downsample_prefix = f"down.{i}.downsample." 
+ vae_conversion_map.append((sd_downsample_prefix, hf_downsample_prefix)) + + hf_upsample_prefix = f"up_blocks.{i}.upsamplers.0." + sd_upsample_prefix = f"up.{3 - i}.upsample." + vae_conversion_map.append((sd_upsample_prefix, hf_upsample_prefix)) + + # up_blocks have three resnets + # also, up blocks in hf are numbered in reverse from sd + for j in range(3): + hf_up_prefix = f"decoder.up_blocks.{i}.resnets.{j}." + sd_up_prefix = f"decoder.up.{3 - i}.block.{j}." + vae_conversion_map.append((sd_up_prefix, hf_up_prefix)) + +# this part accounts for mid blocks in both the encoder and the decoder +for i in range(2): + hf_mid_res_prefix = f"mid_block.resnets.{i}." + sd_mid_res_prefix = f"mid.block_{i + 1}." + vae_conversion_map.append((sd_mid_res_prefix, hf_mid_res_prefix)) + +vae_conversion_map_attn = [ + # (stable-diffusion, HF Diffusers) + ("norm.", "group_norm."), + ("q.", "query."), + ("k.", "key."), + ("v.", "value."), + ("q.", "to_q."), + ("k.", "to_k."), + ("v.", "to_v."), + ("proj_out.", "to_out.0."), + ("proj_out.", "proj_attn."), +] + + +def reshape_weight_for_sd(w): + # convert HF linear weights to SD conv2d weights + return w.reshape(*w.shape, 1, 1) + + +def convert_vae_state_dict(vae_state_dict): + mapping = {k: k for k in vae_state_dict.keys()} + for k, v in mapping.items(): + for sd_part, hf_part in vae_conversion_map: + v = v.replace(hf_part, sd_part) + mapping[k] = v + for k, v in mapping.items(): + if "attentions" in k: + for sd_part, hf_part in vae_conversion_map_attn: + v = v.replace(hf_part, sd_part) + mapping[k] = v + new_state_dict = {v: vae_state_dict[k] for k, v in mapping.items()} + weights_to_convert = ["q", "k", "v", "proj_out"] + for k, v in new_state_dict.items(): + for weight_name in weights_to_convert: + if f"mid.attn_1.{weight_name}.weight" in k: + print(f"Reshaping {k} for SD format") + new_state_dict[k] = reshape_weight_for_sd(v) + return new_state_dict + + +# =========================# +# Text Encoder Conversion # +# =========================# + + +textenc_conversion_lst = [ + # (stable-diffusion, HF Diffusers) + ("resblocks.", "text_model.encoder.layers."), + ("ln_1", "layer_norm1"), + ("ln_2", "layer_norm2"), + (".c_fc.", ".fc1."), + (".c_proj.", ".fc2."), + (".attn", ".self_attn"), + ("ln_final.", "transformer.text_model.final_layer_norm."), + ("token_embedding.weight", "transformer.text_model.embeddings.token_embedding.weight"), + ("positional_embedding", "transformer.text_model.embeddings.position_embedding.weight"), +] +protected = {re.escape(x[1]): x[0] for x in textenc_conversion_lst} +textenc_pattern = re.compile("|".join(protected.keys())) + +# Ordering is from https://github.com/pytorch/pytorch/blob/master/test/cpp/api/modules.cpp +code2idx = {"q": 0, "k": 1, "v": 2} + + +def convert_text_enc_state_dict_v20(text_enc_dict, prefix=""): + new_state_dict = {} + capture_qkv_weight = {} + capture_qkv_bias = {} + for k, v in text_enc_dict.items(): + if not k.startswith(prefix): + continue + if ( + k.endswith(".self_attn.q_proj.weight") + or k.endswith(".self_attn.k_proj.weight") + or k.endswith(".self_attn.v_proj.weight") + ): + k_pre = k[: -len(".q_proj.weight")] + k_code = k[-len("q_proj.weight")] + if k_pre not in capture_qkv_weight: + capture_qkv_weight[k_pre] = [None, None, None] + capture_qkv_weight[k_pre][code2idx[k_code]] = v + continue + + if ( + k.endswith(".self_attn.q_proj.bias") + or k.endswith(".self_attn.k_proj.bias") + or k.endswith(".self_attn.v_proj.bias") + ): + k_pre = k[: -len(".q_proj.bias")] + k_code = k[-len("q_proj.bias")] + if 
k_pre not in capture_qkv_bias:
+                capture_qkv_bias[k_pre] = [None, None, None]
+            capture_qkv_bias[k_pre][code2idx[k_code]] = v
+            continue
+
+        relabelled_key = textenc_pattern.sub(lambda m: protected[re.escape(m.group(0))], k)
+        new_state_dict[relabelled_key] = v
+
+    for k_pre, tensors in capture_qkv_weight.items():
+        if None in tensors:
+            raise Exception("CORRUPTED MODEL: one of the q-k-v values for the text encoder was missing")
+        relabelled_key = textenc_pattern.sub(lambda m: protected[re.escape(m.group(0))], k_pre)
+        new_state_dict[relabelled_key + ".in_proj_weight"] = torch.cat(tensors)
+
+    for k_pre, tensors in capture_qkv_bias.items():
+        if None in tensors:
+            raise Exception("CORRUPTED MODEL: one of the q-k-v values for the text encoder was missing")
+        relabelled_key = textenc_pattern.sub(lambda m: protected[re.escape(m.group(0))], k_pre)
+        new_state_dict[relabelled_key + ".in_proj_bias"] = torch.cat(tensors)
+
+    return new_state_dict
+
+
+def convert_text_enc_state_dict(text_enc_dict):
+    return text_enc_dict
+
+
diff --git a/ldm_patched/modules/diffusers_load.py b/ldm_patched/modules/diffusers_load.py
new file mode 100644
index 0000000000000000000000000000000000000000..62edc72b8b8c6b0cf49b1e675d4df45510b6f2ed
--- /dev/null
+++ b/ldm_patched/modules/diffusers_load.py
@@ -0,0 +1,37 @@
+import os
+
+import ldm_patched.modules.sd
+import ldm_patched.modules.utils  # load_torch_file is used below for the VAE weights
+
+def first_file(path, filenames):
+    for f in filenames:
+        p = os.path.join(path, f)
+        if os.path.exists(p):
+            return p
+    return None
+
+def load_diffusers(model_path, output_vae=True, output_clip=True, embedding_directory=None):
+    diffusion_model_names = ["diffusion_pytorch_model.fp16.safetensors", "diffusion_pytorch_model.safetensors", "diffusion_pytorch_model.fp16.bin", "diffusion_pytorch_model.bin"]
+    unet_path = first_file(os.path.join(model_path, "unet"), diffusion_model_names)
+    vae_path = first_file(os.path.join(model_path, "vae"), diffusion_model_names)
+
+    text_encoder_model_names = ["model.fp16.safetensors", "model.safetensors", "pytorch_model.fp16.bin", "pytorch_model.bin"]
+    text_encoder1_path = first_file(os.path.join(model_path, "text_encoder"), text_encoder_model_names)
+    text_encoder2_path = first_file(os.path.join(model_path, "text_encoder_2"), text_encoder_model_names)
+
+    text_encoder_paths = [text_encoder1_path]
+    if text_encoder2_path is not None:
+        text_encoder_paths.append(text_encoder2_path)
+
+    unet = ldm_patched.modules.sd.load_unet(unet_path)
+
+    clip = None
+    if output_clip:
+        clip = ldm_patched.modules.sd.load_clip(text_encoder_paths, embedding_directory=embedding_directory)
+
+    vae = None
+    if output_vae:
+        sd = ldm_patched.modules.utils.load_torch_file(vae_path)
+        vae = ldm_patched.modules.sd.VAE(sd=sd)
+
+    return (unet, clip, vae)
diff --git a/ldm_patched/modules/gligen.py b/ldm_patched/modules/gligen.py
new file mode 100644
index 0000000000000000000000000000000000000000..11f1ee938de04236a4e8f5eb50448548d345e490
--- /dev/null
+++ b/ldm_patched/modules/gligen.py
@@ -0,0 +1,342 @@
+import math  # used by GatedSelfAttentionDense2 for the square roots of the token counts
+import torch
+from torch import nn
+from ldm_patched.ldm.modules.attention import CrossAttention
+from inspect import isfunction
+
+
+def exists(val):
+    return val is not None
+
+
+def uniq(arr):
+    return {el: True for el in arr}.keys()
+
+
+def default(val, d):
+    if exists(val):
+        return val
+    return d() if isfunction(d) else d
+
+
+# feedforward
+class GEGLU(nn.Module):
+    def __init__(self, dim_in, dim_out):
+        super().__init__()
+        self.proj = nn.Linear(dim_in, dim_out * 2)
+
+    def forward(self, x):
+        x, gate = self.proj(x).chunk(2, dim=-1)
+        return x * 
torch.nn.functional.gelu(gate) + + +class FeedForward(nn.Module): + def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.): + super().__init__() + inner_dim = int(dim * mult) + dim_out = default(dim_out, dim) + project_in = nn.Sequential( + nn.Linear(dim, inner_dim), + nn.GELU() + ) if not glu else GEGLU(dim, inner_dim) + + self.net = nn.Sequential( + project_in, + nn.Dropout(dropout), + nn.Linear(inner_dim, dim_out) + ) + + def forward(self, x): + return self.net(x) + + +class GatedCrossAttentionDense(nn.Module): + def __init__(self, query_dim, context_dim, n_heads, d_head): + super().__init__() + + self.attn = CrossAttention( + query_dim=query_dim, + context_dim=context_dim, + heads=n_heads, + dim_head=d_head) + self.ff = FeedForward(query_dim, glu=True) + + self.norm1 = nn.LayerNorm(query_dim) + self.norm2 = nn.LayerNorm(query_dim) + + self.register_parameter('alpha_attn', nn.Parameter(torch.tensor(0.))) + self.register_parameter('alpha_dense', nn.Parameter(torch.tensor(0.))) + + # this can be useful: we can externally change magnitude of tanh(alpha) + # for example, when it is set to 0, then the entire model is same as + # original one + self.scale = 1 + + def forward(self, x, objs): + + x = x + self.scale * \ + torch.tanh(self.alpha_attn) * self.attn(self.norm1(x), objs, objs) + x = x + self.scale * \ + torch.tanh(self.alpha_dense) * self.ff(self.norm2(x)) + + return x + + +class GatedSelfAttentionDense(nn.Module): + def __init__(self, query_dim, context_dim, n_heads, d_head): + super().__init__() + + # we need a linear projection since we need cat visual feature and obj + # feature + self.linear = nn.Linear(context_dim, query_dim) + + self.attn = CrossAttention( + query_dim=query_dim, + context_dim=query_dim, + heads=n_heads, + dim_head=d_head) + self.ff = FeedForward(query_dim, glu=True) + + self.norm1 = nn.LayerNorm(query_dim) + self.norm2 = nn.LayerNorm(query_dim) + + self.register_parameter('alpha_attn', nn.Parameter(torch.tensor(0.))) + self.register_parameter('alpha_dense', nn.Parameter(torch.tensor(0.))) + + # this can be useful: we can externally change magnitude of tanh(alpha) + # for example, when it is set to 0, then the entire model is same as + # original one + self.scale = 1 + + def forward(self, x, objs): + + N_visual = x.shape[1] + objs = self.linear(objs) + + x = x + self.scale * torch.tanh(self.alpha_attn) * self.attn( + self.norm1(torch.cat([x, objs], dim=1)))[:, 0:N_visual, :] + x = x + self.scale * \ + torch.tanh(self.alpha_dense) * self.ff(self.norm2(x)) + + return x + + +class GatedSelfAttentionDense2(nn.Module): + def __init__(self, query_dim, context_dim, n_heads, d_head): + super().__init__() + + # we need a linear projection since we need cat visual feature and obj + # feature + self.linear = nn.Linear(context_dim, query_dim) + + self.attn = CrossAttention( + query_dim=query_dim, context_dim=query_dim, dim_head=d_head) + self.ff = FeedForward(query_dim, glu=True) + + self.norm1 = nn.LayerNorm(query_dim) + self.norm2 = nn.LayerNorm(query_dim) + + self.register_parameter('alpha_attn', nn.Parameter(torch.tensor(0.))) + self.register_parameter('alpha_dense', nn.Parameter(torch.tensor(0.))) + + # this can be useful: we can externally change magnitude of tanh(alpha) + # for example, when it is set to 0, then the entire model is same as + # original one + self.scale = 1 + + def forward(self, x, objs): + + B, N_visual, _ = x.shape + B, N_ground, _ = objs.shape + + objs = self.linear(objs) + + # sanity check + size_v = math.sqrt(N_visual) + size_g = 
math.sqrt(N_ground)
+        assert int(size_v) == size_v, "the number of visual tokens must be a perfect square"
+        assert int(size_g) == size_g, "the number of grounding tokens must be a perfect square"
+        size_v = int(size_v)
+        size_g = int(size_g)
+
+        # select grounding token and resize it to visual token size as residual
+        out = self.attn(self.norm1(torch.cat([x, objs], dim=1)))[
+            :, N_visual:, :]
+        out = out.permute(0, 2, 1).reshape(B, -1, size_g, size_g)
+        out = torch.nn.functional.interpolate(
+            out, (size_v, size_v), mode='bicubic')
+        residual = out.reshape(B, -1, N_visual).permute(0, 2, 1)
+
+        # add residual to visual feature
+        x = x + self.scale * torch.tanh(self.alpha_attn) * residual
+        x = x + self.scale * \
+            torch.tanh(self.alpha_dense) * self.ff(self.norm2(x))
+
+        return x
+
+
+class FourierEmbedder:
+    def __init__(self, num_freqs=64, temperature=100):
+
+        self.num_freqs = num_freqs
+        self.temperature = temperature
+        self.freq_bands = temperature ** (torch.arange(num_freqs) / num_freqs)
+
+    @torch.no_grad()
+    def __call__(self, x, cat_dim=-1):
+        "x: tensor of arbitrary shape; cat_dim: dimension to concatenate the sin/cos features along"
+        out = []
+        for freq in self.freq_bands:
+            out.append(torch.sin(freq * x))
+            out.append(torch.cos(freq * x))
+        return torch.cat(out, cat_dim)
+
+
+class PositionNet(nn.Module):
+    def __init__(self, in_dim, out_dim, fourier_freqs=8):
+        super().__init__()
+        self.in_dim = in_dim
+        self.out_dim = out_dim
+
+        self.fourier_embedder = FourierEmbedder(num_freqs=fourier_freqs)
+        self.position_dim = fourier_freqs * 2 * 4  # 2 is sin&cos, 4 is xyxy
+
+        self.linears = nn.Sequential(
+            nn.Linear(self.in_dim + self.position_dim, 512),
+            nn.SiLU(),
+            nn.Linear(512, 512),
+            nn.SiLU(),
+            nn.Linear(512, out_dim),
+        )
+
+        self.null_positive_feature = torch.nn.Parameter(
+            torch.zeros([self.in_dim]))
+        self.null_position_feature = torch.nn.Parameter(
+            torch.zeros([self.position_dim]))
+
+    def forward(self, boxes, masks, positive_embeddings):
+        B, N, _ = boxes.shape
+        dtype = self.linears[0].weight.dtype
+        masks = masks.unsqueeze(-1).to(dtype)
+        positive_embeddings = positive_embeddings.to(dtype)
+
+        # embed the box positions (the batch may include padding entries as placeholders)
+        xyxy_embedding = self.fourier_embedder(boxes.to(dtype))  # B*N*4 --> B*N*C
+
+        # learnable null embedding
+        positive_null = self.null_positive_feature.view(1, 1, -1)
+        xyxy_null = self.null_position_feature.view(1, 1, -1)
+
+        # replace padding with learnable null embedding
+        positive_embeddings = positive_embeddings * \
+            masks + (1 - masks) * positive_null
+        xyxy_embedding = xyxy_embedding * masks + (1 - masks) * xyxy_null
+
+        objs = self.linears(
+            torch.cat([positive_embeddings, xyxy_embedding], dim=-1))
+        assert objs.shape == torch.Size([B, N, self.out_dim])
+        return objs
+
+
+class Gligen(nn.Module):
+    def __init__(self, modules, position_net, key_dim):
+        super().__init__()
+        self.module_list = nn.ModuleList(modules)
+        self.position_net = position_net
+        self.key_dim = key_dim
+        self.max_objs = 30
+        self.current_device = torch.device("cpu")
+
+    def _set_position(self, boxes, masks, positive_embeddings):
+        objs = self.position_net(boxes, masks, positive_embeddings)
+        def func(x, extra_options):
+            key = extra_options["transformer_index"]
+            module = self.module_list[key]
+            return module(x, objs)
+        return func
+
+    def set_position(self, latent_image_shape, position_params, device):
+        batch, c, h, w = latent_image_shape
+        masks = torch.zeros([self.max_objs], device="cpu")
+        boxes = []
+        positive_embeddings = []
+        # each p is assumed to be packed as (embedding, height, width, y, x) in latent coordinates
+        for p in position_params:
+            x1 = (p[4]) / w
+            y1 = (p[3]) / h
+            x2 = (p[4] 
+ p[2]) / w + y2 = (p[3] + p[1]) / h + masks[len(boxes)] = 1.0 + boxes += [torch.tensor((x1, y1, x2, y2)).unsqueeze(0)] + positive_embeddings += [p[0]] + append_boxes = [] + append_conds = [] + if len(boxes) < self.max_objs: + append_boxes = [torch.zeros( + [self.max_objs - len(boxes), 4], device="cpu")] + append_conds = [torch.zeros( + [self.max_objs - len(boxes), self.key_dim], device="cpu")] + + box_out = torch.cat( + boxes + append_boxes).unsqueeze(0).repeat(batch, 1, 1) + masks = masks.unsqueeze(0).repeat(batch, 1) + conds = torch.cat(positive_embeddings + + append_conds).unsqueeze(0).repeat(batch, 1, 1) + return self._set_position( + box_out.to(device), + masks.to(device), + conds.to(device)) + + def set_empty(self, latent_image_shape, device): + batch, c, h, w = latent_image_shape + masks = torch.zeros([self.max_objs], device="cpu").repeat(batch, 1) + box_out = torch.zeros([self.max_objs, 4], + device="cpu").repeat(batch, 1, 1) + conds = torch.zeros([self.max_objs, self.key_dim], + device="cpu").repeat(batch, 1, 1) + return self._set_position( + box_out.to(device), + masks.to(device), + conds.to(device)) + + +def load_gligen(sd): + sd_k = sd.keys() + output_list = [] + key_dim = 768 + for a in ["input_blocks", "middle_block", "output_blocks"]: + for b in range(20): + k_temp = filter(lambda k: "{}.{}.".format(a, b) + in k and ".fuser." in k, sd_k) + k_temp = map(lambda k: (k, k.split(".fuser.")[-1]), k_temp) + + n_sd = {} + for k in k_temp: + n_sd[k[1]] = sd[k[0]] + if len(n_sd) > 0: + query_dim = n_sd["linear.weight"].shape[0] + key_dim = n_sd["linear.weight"].shape[1] + + if key_dim == 768: # SD1.x + n_heads = 8 + d_head = query_dim // n_heads + else: + d_head = 64 + n_heads = query_dim // d_head + + gated = GatedSelfAttentionDense( + query_dim, key_dim, n_heads, d_head) + gated.load_state_dict(n_sd, strict=False) + output_list.append(gated) + + if "position_net.null_positive_feature" in sd_k: + in_dim = sd["position_net.null_positive_feature"].shape[0] + out_dim = sd["position_net.linears.4.weight"].shape[0] + + class WeightsLoader(torch.nn.Module): + pass + w = WeightsLoader() + w.position_net = PositionNet(in_dim, out_dim) + w.load_state_dict(sd, strict=False) + + gligen = Gligen(output_list, w.position_net, key_dim) + return gligen diff --git a/ldm_patched/modules/latent_formats.py b/ldm_patched/modules/latent_formats.py new file mode 100644 index 0000000000000000000000000000000000000000..2252a075ed530fce5b6b6f81dfa7eede4b5395af --- /dev/null +++ b/ldm_patched/modules/latent_formats.py @@ -0,0 +1,39 @@ + +class LatentFormat: + scale_factor = 1.0 + latent_rgb_factors = None + taesd_decoder_name = None + + def process_in(self, latent): + return latent * self.scale_factor + + def process_out(self, latent): + return latent / self.scale_factor + +class SD15(LatentFormat): + def __init__(self, scale_factor=0.18215): + self.scale_factor = scale_factor + self.latent_rgb_factors = [ + # R G B + [ 0.3512, 0.2297, 0.3227], + [ 0.3250, 0.4974, 0.2350], + [-0.2829, 0.1762, 0.2721], + [-0.2120, -0.2616, -0.7177] + ] + self.taesd_decoder_name = "taesd_decoder" + +class SDXL(LatentFormat): + def __init__(self): + self.scale_factor = 0.13025 + self.latent_rgb_factors = [ + # R G B + [ 0.3920, 0.4054, 0.4549], + [-0.2634, -0.0196, 0.0653], + [ 0.0568, 0.1687, -0.0755], + [-0.3112, -0.2359, -0.2076] + ] + self.taesd_decoder_name = "taesdxl_decoder" + +class SD_X4(LatentFormat): + def __init__(self): + self.scale_factor = 0.08333 diff --git a/ldm_patched/modules/lora.py 
b/ldm_patched/modules/lora.py new file mode 100644 index 0000000000000000000000000000000000000000..cc5a29da82eb6ed48afbb1eeff89074edfb4d49d --- /dev/null +++ b/ldm_patched/modules/lora.py @@ -0,0 +1,224 @@ +import ldm_patched.modules.utils + +LORA_CLIP_MAP = { + "mlp.fc1": "mlp_fc1", + "mlp.fc2": "mlp_fc2", + "self_attn.k_proj": "self_attn_k_proj", + "self_attn.q_proj": "self_attn_q_proj", + "self_attn.v_proj": "self_attn_v_proj", + "self_attn.out_proj": "self_attn_out_proj", +} + + +def load_lora(lora, to_load): + patch_dict = {} + loaded_keys = set() + for x in to_load: + alpha_name = "{}.alpha".format(x) + alpha = None + if alpha_name in lora.keys(): + alpha = lora[alpha_name].item() + loaded_keys.add(alpha_name) + + regular_lora = "{}.lora_up.weight".format(x) + diffusers_lora = "{}_lora.up.weight".format(x) + transformers_lora = "{}.lora_linear_layer.up.weight".format(x) + A_name = None + + if regular_lora in lora.keys(): + A_name = regular_lora + B_name = "{}.lora_down.weight".format(x) + mid_name = "{}.lora_mid.weight".format(x) + elif diffusers_lora in lora.keys(): + A_name = diffusers_lora + B_name = "{}_lora.down.weight".format(x) + mid_name = None + elif transformers_lora in lora.keys(): + A_name = transformers_lora + B_name ="{}.lora_linear_layer.down.weight".format(x) + mid_name = None + + if A_name is not None: + mid = None + if mid_name is not None and mid_name in lora.keys(): + mid = lora[mid_name] + loaded_keys.add(mid_name) + patch_dict[to_load[x]] = ("lora", (lora[A_name], lora[B_name], alpha, mid)) + loaded_keys.add(A_name) + loaded_keys.add(B_name) + + + ######## loha + hada_w1_a_name = "{}.hada_w1_a".format(x) + hada_w1_b_name = "{}.hada_w1_b".format(x) + hada_w2_a_name = "{}.hada_w2_a".format(x) + hada_w2_b_name = "{}.hada_w2_b".format(x) + hada_t1_name = "{}.hada_t1".format(x) + hada_t2_name = "{}.hada_t2".format(x) + if hada_w1_a_name in lora.keys(): + hada_t1 = None + hada_t2 = None + if hada_t1_name in lora.keys(): + hada_t1 = lora[hada_t1_name] + hada_t2 = lora[hada_t2_name] + loaded_keys.add(hada_t1_name) + loaded_keys.add(hada_t2_name) + + patch_dict[to_load[x]] = ("loha", (lora[hada_w1_a_name], lora[hada_w1_b_name], alpha, lora[hada_w2_a_name], lora[hada_w2_b_name], hada_t1, hada_t2)) + loaded_keys.add(hada_w1_a_name) + loaded_keys.add(hada_w1_b_name) + loaded_keys.add(hada_w2_a_name) + loaded_keys.add(hada_w2_b_name) + + + ######## lokr + lokr_w1_name = "{}.lokr_w1".format(x) + lokr_w2_name = "{}.lokr_w2".format(x) + lokr_w1_a_name = "{}.lokr_w1_a".format(x) + lokr_w1_b_name = "{}.lokr_w1_b".format(x) + lokr_t2_name = "{}.lokr_t2".format(x) + lokr_w2_a_name = "{}.lokr_w2_a".format(x) + lokr_w2_b_name = "{}.lokr_w2_b".format(x) + + lokr_w1 = None + if lokr_w1_name in lora.keys(): + lokr_w1 = lora[lokr_w1_name] + loaded_keys.add(lokr_w1_name) + + lokr_w2 = None + if lokr_w2_name in lora.keys(): + lokr_w2 = lora[lokr_w2_name] + loaded_keys.add(lokr_w2_name) + + lokr_w1_a = None + if lokr_w1_a_name in lora.keys(): + lokr_w1_a = lora[lokr_w1_a_name] + loaded_keys.add(lokr_w1_a_name) + + lokr_w1_b = None + if lokr_w1_b_name in lora.keys(): + lokr_w1_b = lora[lokr_w1_b_name] + loaded_keys.add(lokr_w1_b_name) + + lokr_w2_a = None + if lokr_w2_a_name in lora.keys(): + lokr_w2_a = lora[lokr_w2_a_name] + loaded_keys.add(lokr_w2_a_name) + + lokr_w2_b = None + if lokr_w2_b_name in lora.keys(): + lokr_w2_b = lora[lokr_w2_b_name] + loaded_keys.add(lokr_w2_b_name) + + lokr_t2 = None + if lokr_t2_name in lora.keys(): + lokr_t2 = lora[lokr_t2_name] + 
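+            # every LoKr tensor variant gathered above is bundled into a
+            # single "lokr" patch entry below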
loaded_keys.add(lokr_t2_name) + + if (lokr_w1 is not None) or (lokr_w2 is not None) or (lokr_w1_a is not None) or (lokr_w2_a is not None): + patch_dict[to_load[x]] = ("lokr", (lokr_w1, lokr_w2, alpha, lokr_w1_a, lokr_w1_b, lokr_w2_a, lokr_w2_b, lokr_t2)) + + #glora + a1_name = "{}.a1.weight".format(x) + a2_name = "{}.a2.weight".format(x) + b1_name = "{}.b1.weight".format(x) + b2_name = "{}.b2.weight".format(x) + if a1_name in lora: + patch_dict[to_load[x]] = ("glora", (lora[a1_name], lora[a2_name], lora[b1_name], lora[b2_name], alpha)) + loaded_keys.add(a1_name) + loaded_keys.add(a2_name) + loaded_keys.add(b1_name) + loaded_keys.add(b2_name) + + w_norm_name = "{}.w_norm".format(x) + b_norm_name = "{}.b_norm".format(x) + w_norm = lora.get(w_norm_name, None) + b_norm = lora.get(b_norm_name, None) + + if w_norm is not None: + loaded_keys.add(w_norm_name) + patch_dict[to_load[x]] = ("diff", (w_norm,)) + if b_norm is not None: + loaded_keys.add(b_norm_name) + patch_dict["{}.bias".format(to_load[x][:-len(".weight")])] = ("diff", (b_norm,)) + + diff_name = "{}.diff".format(x) + diff_weight = lora.get(diff_name, None) + if diff_weight is not None: + patch_dict[to_load[x]] = ("diff", (diff_weight,)) + loaded_keys.add(diff_name) + + diff_bias_name = "{}.diff_b".format(x) + diff_bias = lora.get(diff_bias_name, None) + if diff_bias is not None: + patch_dict["{}.bias".format(to_load[x][:-len(".weight")])] = ("diff", (diff_bias,)) + loaded_keys.add(diff_bias_name) + + for x in lora.keys(): + if x not in loaded_keys: + print("lora key not loaded", x) + return patch_dict + +def model_lora_keys_clip(model, key_map={}): + sdk = model.state_dict().keys() + + text_model_lora_key = "lora_te_text_model_encoder_layers_{}_{}" + clip_l_present = False + for b in range(32): #TODO: clean up + for c in LORA_CLIP_MAP: + k = "clip_h.transformer.text_model.encoder.layers.{}.{}.weight".format(b, c) + if k in sdk: + lora_key = text_model_lora_key.format(b, LORA_CLIP_MAP[c]) + key_map[lora_key] = k + lora_key = "lora_te1_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) + key_map[lora_key] = k + lora_key = "text_encoder.text_model.encoder.layers.{}.{}".format(b, c) #diffusers lora + key_map[lora_key] = k + + k = "clip_l.transformer.text_model.encoder.layers.{}.{}.weight".format(b, c) + if k in sdk: + lora_key = text_model_lora_key.format(b, LORA_CLIP_MAP[c]) + key_map[lora_key] = k + lora_key = "lora_te1_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) #SDXL base + key_map[lora_key] = k + clip_l_present = True + lora_key = "text_encoder.text_model.encoder.layers.{}.{}".format(b, c) #diffusers lora + key_map[lora_key] = k + + k = "clip_g.transformer.text_model.encoder.layers.{}.{}.weight".format(b, c) + if k in sdk: + if clip_l_present: + lora_key = "lora_te2_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) #SDXL base + key_map[lora_key] = k + lora_key = "text_encoder_2.text_model.encoder.layers.{}.{}".format(b, c) #diffusers lora + key_map[lora_key] = k + else: + lora_key = "lora_te_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) #TODO: test if this is correct for SDXL-Refiner + key_map[lora_key] = k + lora_key = "text_encoder.text_model.encoder.layers.{}.{}".format(b, c) #diffusers lora + key_map[lora_key] = k + + return key_map + +def model_lora_keys_unet(model, key_map={}): + sdk = model.state_dict().keys() + + for k in sdk: + if k.startswith("diffusion_model.") and k.endswith(".weight"): + key_lora = k[len("diffusion_model."):-len(".weight")].replace(".", "_") + 
key_map["lora_unet_{}".format(key_lora)] = k + + diffusers_keys = ldm_patched.modules.utils.unet_to_diffusers(model.model_config.unet_config) + for k in diffusers_keys: + if k.endswith(".weight"): + unet_key = "diffusion_model.{}".format(diffusers_keys[k]) + key_lora = k[:-len(".weight")].replace(".", "_") + key_map["lora_unet_{}".format(key_lora)] = unet_key + + diffusers_lora_prefix = ["", "unet."] + for p in diffusers_lora_prefix: + diffusers_lora_key = "{}{}".format(p, k[:-len(".weight")].replace(".to_", ".processor.to_")) + if diffusers_lora_key.endswith(".to_out.0"): + diffusers_lora_key = diffusers_lora_key[:-2] + key_map[diffusers_lora_key] = unet_key + return key_map diff --git a/ldm_patched/modules/model_base.py b/ldm_patched/modules/model_base.py new file mode 100644 index 0000000000000000000000000000000000000000..9c69e98b88d5eec2ffe02067d80a571926a112f6 --- /dev/null +++ b/ldm_patched/modules/model_base.py @@ -0,0 +1,425 @@ +import torch +from ldm_patched.ldm.modules.diffusionmodules.openaimodel import UNetModel, Timestep +from ldm_patched.ldm.modules.encoders.noise_aug_modules import CLIPEmbeddingNoiseAugmentation +from ldm_patched.ldm.modules.diffusionmodules.upscaling import ImageConcatWithNoiseAugmentation +import ldm_patched.modules.model_management +import ldm_patched.modules.conds +import ldm_patched.modules.ops +from enum import Enum +from . import utils + +class ModelType(Enum): + EPS = 1 + V_PREDICTION = 2 + V_PREDICTION_EDM = 3 + + +from ldm_patched.modules.model_sampling import EPS, V_PREDICTION, ModelSamplingDiscrete, ModelSamplingContinuousEDM + + +def model_sampling(model_config, model_type): + s = ModelSamplingDiscrete + + if model_type == ModelType.EPS: + c = EPS + elif model_type == ModelType.V_PREDICTION: + c = V_PREDICTION + elif model_type == ModelType.V_PREDICTION_EDM: + c = V_PREDICTION + s = ModelSamplingContinuousEDM + + class ModelSampling(s, c): + pass + + return ModelSampling(model_config) + + +class BaseModel(torch.nn.Module): + def __init__(self, model_config, model_type=ModelType.EPS, device=None): + super().__init__() + + unet_config = model_config.unet_config + self.latent_format = model_config.latent_format + self.model_config = model_config + self.manual_cast_dtype = model_config.manual_cast_dtype + + if not unet_config.get("disable_unet_model_creation", False): + if self.manual_cast_dtype is not None: + operations = ldm_patched.modules.ops.manual_cast + else: + operations = ldm_patched.modules.ops.disable_weight_init + self.diffusion_model = UNetModel(**unet_config, device=device, operations=operations) + self.model_type = model_type + self.model_sampling = model_sampling(model_config, model_type) + + self.adm_channels = unet_config.get("adm_in_channels", None) + if self.adm_channels is None: + self.adm_channels = 0 + self.inpaint_model = False + print("model_type", model_type.name) + print("UNet ADM Dimension", self.adm_channels) + + def apply_model(self, x, t, c_concat=None, c_crossattn=None, control=None, transformer_options={}, **kwargs): + sigma = t + xc = self.model_sampling.calculate_input(sigma, x) + if c_concat is not None: + xc = torch.cat([xc] + [c_concat], dim=1) + + context = c_crossattn + dtype = self.get_dtype() + + if self.manual_cast_dtype is not None: + dtype = self.manual_cast_dtype + + xc = xc.to(dtype) + t = self.model_sampling.timestep(t).float() + context = context.to(dtype) + extra_conds = {} + for o in kwargs: + extra = kwargs[o] + if hasattr(extra, "dtype"): + if extra.dtype != torch.int and extra.dtype != 
torch.long: + extra = extra.to(dtype) + extra_conds[o] = extra + + model_output = self.diffusion_model(xc, t, context=context, control=control, transformer_options=transformer_options, **extra_conds).float() + return self.model_sampling.calculate_denoised(sigma, model_output, x) + + def get_dtype(self): + return self.diffusion_model.dtype + + def is_adm(self): + return self.adm_channels > 0 + + def encode_adm(self, **kwargs): + return None + + def extra_conds(self, **kwargs): + out = {} + if self.inpaint_model: + concat_keys = ("mask", "masked_image") + cond_concat = [] + denoise_mask = kwargs.get("concat_mask", kwargs.get("denoise_mask", None)) + concat_latent_image = kwargs.get("concat_latent_image", None) + if concat_latent_image is None: + concat_latent_image = kwargs.get("latent_image", None) + else: + concat_latent_image = self.process_latent_in(concat_latent_image) + + noise = kwargs.get("noise", None) + device = kwargs["device"] + + if concat_latent_image.shape[1:] != noise.shape[1:]: + concat_latent_image = utils.common_upscale(concat_latent_image, noise.shape[-1], noise.shape[-2], "bilinear", "center") + + concat_latent_image = utils.resize_to_batch_size(concat_latent_image, noise.shape[0]) + + if len(denoise_mask.shape) == len(noise.shape): + denoise_mask = denoise_mask[:,:1] + + denoise_mask = denoise_mask.reshape((-1, 1, denoise_mask.shape[-2], denoise_mask.shape[-1])) + if denoise_mask.shape[-2:] != noise.shape[-2:]: + denoise_mask = utils.common_upscale(denoise_mask, noise.shape[-1], noise.shape[-2], "bilinear", "center") + denoise_mask = utils.resize_to_batch_size(denoise_mask.round(), noise.shape[0]) + + def blank_inpaint_image_like(latent_image): + blank_image = torch.ones_like(latent_image) + # these are the values for "zero" in pixel space translated to latent space + blank_image[:,0] *= 0.8223 + blank_image[:,1] *= -0.6876 + blank_image[:,2] *= 0.6364 + blank_image[:,3] *= 0.1380 + return blank_image + + for ck in concat_keys: + if denoise_mask is not None: + if ck == "mask": + cond_concat.append(denoise_mask.to(device)) + elif ck == "masked_image": + cond_concat.append(concat_latent_image.to(device)) #NOTE: the latent_image should be masked by the mask in pixel space + else: + if ck == "mask": + cond_concat.append(torch.ones_like(noise)[:,:1]) + elif ck == "masked_image": + cond_concat.append(blank_inpaint_image_like(noise)) + data = torch.cat(cond_concat, dim=1) + out['c_concat'] = ldm_patched.modules.conds.CONDNoiseShape(data) + + adm = self.encode_adm(**kwargs) + if adm is not None: + out['y'] = ldm_patched.modules.conds.CONDRegular(adm) + + cross_attn = kwargs.get("cross_attn", None) + if cross_attn is not None: + out['c_crossattn'] = ldm_patched.modules.conds.CONDCrossAttn(cross_attn) + + return out + + def load_model_weights(self, sd, unet_prefix=""): + to_load = {} + keys = list(sd.keys()) + for k in keys: + if k.startswith(unet_prefix): + to_load[k[len(unet_prefix):]] = sd.pop(k) + + to_load = self.model_config.process_unet_state_dict(to_load) + m, u = self.diffusion_model.load_state_dict(to_load, strict=False) + if len(m) > 0: + print("unet missing:", m) + + if len(u) > 0: + print("unet unexpected:", u) + del to_load + return self + + def process_latent_in(self, latent): + return self.latent_format.process_in(latent) + + def process_latent_out(self, latent): + return self.latent_format.process_out(latent) + + def state_dict_for_saving(self, clip_state_dict=None, vae_state_dict=None, clip_vision_state_dict=None): + extra_sds = [] + if clip_state_dict is not 
None: + extra_sds.append(self.model_config.process_clip_state_dict_for_saving(clip_state_dict)) + if vae_state_dict is not None: + extra_sds.append(self.model_config.process_vae_state_dict_for_saving(vae_state_dict)) + if clip_vision_state_dict is not None: + extra_sds.append(self.model_config.process_clip_vision_state_dict_for_saving(clip_vision_state_dict)) + + unet_state_dict = self.diffusion_model.state_dict() + unet_state_dict = self.model_config.process_unet_state_dict_for_saving(unet_state_dict) + + if self.get_dtype() == torch.float16: + extra_sds = map(lambda sd: utils.convert_sd_to(sd, torch.float16), extra_sds) + + if self.model_type == ModelType.V_PREDICTION: + unet_state_dict["v_pred"] = torch.tensor([]) + + for sd in extra_sds: + unet_state_dict.update(sd) + + return unet_state_dict + + def set_inpaint(self): + self.inpaint_model = True + + def memory_required(self, input_shape): + if ldm_patched.modules.model_management.xformers_enabled() or ldm_patched.modules.model_management.pytorch_attention_flash_attention(): + dtype = self.get_dtype() + if self.manual_cast_dtype is not None: + dtype = self.manual_cast_dtype + #TODO: this needs to be tweaked + area = input_shape[0] * input_shape[2] * input_shape[3] + return (area * ldm_patched.modules.model_management.dtype_size(dtype) / 50) * (1024 * 1024) + else: + #TODO: this formula might be too aggressive since I tweaked the sub-quad and split algorithms to use less memory. + area = input_shape[0] * input_shape[2] * input_shape[3] + return (((area * 0.6) / 0.9) + 1024) * (1024 * 1024) + + +def unclip_adm(unclip_conditioning, device, noise_augmentor, noise_augment_merge=0.0, seed=None): + adm_inputs = [] + weights = [] + noise_aug = [] + for unclip_cond in unclip_conditioning: + for adm_cond in unclip_cond["clip_vision_output"].image_embeds: + weight = unclip_cond["strength"] + noise_augment = unclip_cond["noise_augmentation"] + noise_level = round((noise_augmentor.max_noise_level - 1) * noise_augment) + c_adm, noise_level_emb = noise_augmentor(adm_cond.to(device), noise_level=torch.tensor([noise_level], device=device), seed=seed) + adm_out = torch.cat((c_adm, noise_level_emb), 1) * weight + weights.append(weight) + noise_aug.append(noise_augment) + adm_inputs.append(adm_out) + + if len(noise_aug) > 1: + adm_out = torch.stack(adm_inputs).sum(0) + noise_augment = noise_augment_merge + noise_level = round((noise_augmentor.max_noise_level - 1) * noise_augment) + c_adm, noise_level_emb = noise_augmentor(adm_out[:, :noise_augmentor.time_embed.dim], noise_level=torch.tensor([noise_level], device=device)) + adm_out = torch.cat((c_adm, noise_level_emb), 1) + + return adm_out + +class SD21UNCLIP(BaseModel): + def __init__(self, model_config, noise_aug_config, model_type=ModelType.V_PREDICTION, device=None): + super().__init__(model_config, model_type, device=device) + self.noise_augmentor = CLIPEmbeddingNoiseAugmentation(**noise_aug_config) + + def encode_adm(self, **kwargs): + unclip_conditioning = kwargs.get("unclip_conditioning", None) + device = kwargs["device"] + if unclip_conditioning is None: + return torch.zeros((1, self.adm_channels)) + else: + return unclip_adm(unclip_conditioning, device, self.noise_augmentor, kwargs.get("unclip_noise_augment_merge", 0.05), kwargs.get("seed", 0) - 10) + +def sdxl_pooled(args, noise_augmentor): + if "unclip_conditioning" in args: + return unclip_adm(args.get("unclip_conditioning", None), args["device"], noise_augmentor, seed=args.get("seed", 0) - 10)[:,:1280] + else: + return args["pooled_output"] 
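For orientation (editor's sketch, not part of the patch): the SDXL-family encode_adm() implementations below all follow one pattern -- embed each size/crop scalar with Timestep(256), flatten and concatenate, then prepend the 1280-dim pooled text embedding. A standalone sanity check of the resulting widths, which are exactly the adm_in_channels values that model_detection.py later matches on:

    # Channel arithmetic for the ADM vectors assembled below (illustrative only).
    POOLED, EMBED = 1280, 256  # pooled CLIP-G width, Timestep embedding width
    sdxl_scalars = ["height", "width", "crop_h", "crop_w",
                    "target_height", "target_width"]
    refiner_scalars = ["height", "width", "crop_h", "crop_w", "aesthetic_score"]
    assert POOLED + len(sdxl_scalars) * EMBED == 2816     # SDXL adm_in_channels
    assert POOLED + len(refiner_scalars) * EMBED == 2560  # SDXL refiner adm_in_channels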
+ +class SDXLRefiner(BaseModel): + def __init__(self, model_config, model_type=ModelType.EPS, device=None): + super().__init__(model_config, model_type, device=device) + self.embedder = Timestep(256) + self.noise_augmentor = CLIPEmbeddingNoiseAugmentation(**{"noise_schedule_config": {"timesteps": 1000, "beta_schedule": "squaredcos_cap_v2"}, "timestep_dim": 1280}) + + def encode_adm(self, **kwargs): + clip_pooled = sdxl_pooled(kwargs, self.noise_augmentor) + width = kwargs.get("width", 768) + height = kwargs.get("height", 768) + crop_w = kwargs.get("crop_w", 0) + crop_h = kwargs.get("crop_h", 0) + + if kwargs.get("prompt_type", "") == "negative": + aesthetic_score = kwargs.get("aesthetic_score", 2.5) + else: + aesthetic_score = kwargs.get("aesthetic_score", 6) + + out = [] + out.append(self.embedder(torch.Tensor([height]))) + out.append(self.embedder(torch.Tensor([width]))) + out.append(self.embedder(torch.Tensor([crop_h]))) + out.append(self.embedder(torch.Tensor([crop_w]))) + out.append(self.embedder(torch.Tensor([aesthetic_score]))) + flat = torch.flatten(torch.cat(out)).unsqueeze(dim=0).repeat(clip_pooled.shape[0], 1) + return torch.cat((clip_pooled.to(flat.device), flat), dim=1) + +class SDXL(BaseModel): + def __init__(self, model_config, model_type=ModelType.EPS, device=None): + super().__init__(model_config, model_type, device=device) + self.embedder = Timestep(256) + self.noise_augmentor = CLIPEmbeddingNoiseAugmentation(**{"noise_schedule_config": {"timesteps": 1000, "beta_schedule": "squaredcos_cap_v2"}, "timestep_dim": 1280}) + + def encode_adm(self, **kwargs): + clip_pooled = sdxl_pooled(kwargs, self.noise_augmentor) + width = kwargs.get("width", 768) + height = kwargs.get("height", 768) + crop_w = kwargs.get("crop_w", 0) + crop_h = kwargs.get("crop_h", 0) + target_width = kwargs.get("target_width", width) + target_height = kwargs.get("target_height", height) + + out = [] + out.append(self.embedder(torch.Tensor([height]))) + out.append(self.embedder(torch.Tensor([width]))) + out.append(self.embedder(torch.Tensor([crop_h]))) + out.append(self.embedder(torch.Tensor([crop_w]))) + out.append(self.embedder(torch.Tensor([target_height]))) + out.append(self.embedder(torch.Tensor([target_width]))) + flat = torch.flatten(torch.cat(out)).unsqueeze(dim=0).repeat(clip_pooled.shape[0], 1) + return torch.cat((clip_pooled.to(flat.device), flat), dim=1) + +class SVD_img2vid(BaseModel): + def __init__(self, model_config, model_type=ModelType.V_PREDICTION_EDM, device=None): + super().__init__(model_config, model_type, device=device) + self.embedder = Timestep(256) + + def encode_adm(self, **kwargs): + fps_id = kwargs.get("fps", 6) - 1 + motion_bucket_id = kwargs.get("motion_bucket_id", 127) + augmentation = kwargs.get("augmentation_level", 0) + + out = [] + out.append(self.embedder(torch.Tensor([fps_id]))) + out.append(self.embedder(torch.Tensor([motion_bucket_id]))) + out.append(self.embedder(torch.Tensor([augmentation]))) + + flat = torch.flatten(torch.cat(out)).unsqueeze(dim=0) + return flat + + def extra_conds(self, **kwargs): + out = {} + adm = self.encode_adm(**kwargs) + if adm is not None: + out['y'] = ldm_patched.modules.conds.CONDRegular(adm) + + latent_image = kwargs.get("concat_latent_image", None) + noise = kwargs.get("noise", None) + device = kwargs["device"] + + if latent_image is None: + latent_image = torch.zeros_like(noise) + + if latent_image.shape[1:] != noise.shape[1:]: + latent_image = utils.common_upscale(latent_image, noise.shape[-1], noise.shape[-2], "bilinear", "center") + 
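+        # The conditioning latent must agree with the noise both spatially
+        # (common_upscale above) and in batch size (resize_to_batch_size below)
+        # before it can be passed along as c_concat.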
+ latent_image = utils.resize_to_batch_size(latent_image, noise.shape[0]) + + out['c_concat'] = ldm_patched.modules.conds.CONDNoiseShape(latent_image) + + cross_attn = kwargs.get("cross_attn", None) + if cross_attn is not None: + out['c_crossattn'] = ldm_patched.modules.conds.CONDCrossAttn(cross_attn) + + if "time_conditioning" in kwargs: + out["time_context"] = ldm_patched.modules.conds.CONDCrossAttn(kwargs["time_conditioning"]) + + out['image_only_indicator'] = ldm_patched.modules.conds.CONDConstant(torch.zeros((1,), device=device)) + out['num_video_frames'] = ldm_patched.modules.conds.CONDConstant(noise.shape[0]) + return out + +class Stable_Zero123(BaseModel): + def __init__(self, model_config, model_type=ModelType.EPS, device=None, cc_projection_weight=None, cc_projection_bias=None): + super().__init__(model_config, model_type, device=device) + self.cc_projection = ldm_patched.modules.ops.manual_cast.Linear(cc_projection_weight.shape[1], cc_projection_weight.shape[0], dtype=self.get_dtype(), device=device) + self.cc_projection.weight.copy_(cc_projection_weight) + self.cc_projection.bias.copy_(cc_projection_bias) + + def extra_conds(self, **kwargs): + out = {} + + latent_image = kwargs.get("concat_latent_image", None) + noise = kwargs.get("noise", None) + + if latent_image is None: + latent_image = torch.zeros_like(noise) + + if latent_image.shape[1:] != noise.shape[1:]: + latent_image = utils.common_upscale(latent_image, noise.shape[-1], noise.shape[-2], "bilinear", "center") + + latent_image = utils.resize_to_batch_size(latent_image, noise.shape[0]) + + out['c_concat'] = ldm_patched.modules.conds.CONDNoiseShape(latent_image) + + cross_attn = kwargs.get("cross_attn", None) + if cross_attn is not None: + if cross_attn.shape[-1] != 768: + cross_attn = self.cc_projection(cross_attn) + out['c_crossattn'] = ldm_patched.modules.conds.CONDCrossAttn(cross_attn) + return out + +class SD_X4Upscaler(BaseModel): + def __init__(self, model_config, model_type=ModelType.V_PREDICTION, device=None): + super().__init__(model_config, model_type, device=device) + self.noise_augmentor = ImageConcatWithNoiseAugmentation(noise_schedule_config={"linear_start": 0.0001, "linear_end": 0.02}, max_noise_level=350) + + def extra_conds(self, **kwargs): + out = {} + + image = kwargs.get("concat_image", None) + noise = kwargs.get("noise", None) + noise_augment = kwargs.get("noise_augmentation", 0.0) + device = kwargs["device"] + seed = kwargs["seed"] - 10 + + noise_level = round((self.noise_augmentor.max_noise_level) * noise_augment) + + if image is None: + image = torch.zeros_like(noise)[:,:3] + + if image.shape[1:] != noise.shape[1:]: + image = utils.common_upscale(image.to(device), noise.shape[-1], noise.shape[-2], "bilinear", "center") + + noise_level = torch.tensor([noise_level], device=device) + if noise_augment > 0: + image, noise_level = self.noise_augmentor(image.to(device), noise_level=noise_level, seed=seed) + + image = utils.resize_to_batch_size(image, noise.shape[0]) + + out['c_concat'] = ldm_patched.modules.conds.CONDNoiseShape(image) + out['y'] = ldm_patched.modules.conds.CONDRegular(noise_level) + return out diff --git a/ldm_patched/modules/model_detection.py b/ldm_patched/modules/model_detection.py new file mode 100644 index 0000000000000000000000000000000000000000..126386ca80e236e8205a3e8918c24923bfe23e87 --- /dev/null +++ b/ldm_patched/modules/model_detection.py @@ -0,0 +1,320 @@ +import ldm_patched.modules.supported_models +import ldm_patched.modules.supported_models_base + +def 
count_blocks(state_dict_keys, prefix_string): + count = 0 + while True: + c = False + for k in state_dict_keys: + if k.startswith(prefix_string.format(count)): + c = True + break + if c == False: + break + count += 1 + return count + +def calculate_transformer_depth(prefix, state_dict_keys, state_dict): + context_dim = None + use_linear_in_transformer = False + + transformer_prefix = prefix + "1.transformer_blocks." + transformer_keys = sorted(list(filter(lambda a: a.startswith(transformer_prefix), state_dict_keys))) + if len(transformer_keys) > 0: + last_transformer_depth = count_blocks(state_dict_keys, transformer_prefix + '{}') + context_dim = state_dict['{}0.attn2.to_k.weight'.format(transformer_prefix)].shape[1] + use_linear_in_transformer = len(state_dict['{}1.proj_in.weight'.format(prefix)].shape) == 2 + time_stack = '{}1.time_stack.0.attn1.to_q.weight'.format(prefix) in state_dict or '{}1.time_mix_blocks.0.attn1.to_q.weight'.format(prefix) in state_dict + return last_transformer_depth, context_dim, use_linear_in_transformer, time_stack + return None + +def detect_unet_config(state_dict, key_prefix, dtype): + state_dict_keys = list(state_dict.keys()) + + unet_config = { + "use_checkpoint": False, + "image_size": 32, + "use_spatial_transformer": True, + "legacy": False + } + + y_input = '{}label_emb.0.0.weight'.format(key_prefix) + if y_input in state_dict_keys: + unet_config["num_classes"] = "sequential" + unet_config["adm_in_channels"] = state_dict[y_input].shape[1] + else: + unet_config["adm_in_channels"] = None + + unet_config["dtype"] = dtype + model_channels = state_dict['{}input_blocks.0.0.weight'.format(key_prefix)].shape[0] + in_channels = state_dict['{}input_blocks.0.0.weight'.format(key_prefix)].shape[1] + + out_key = '{}out.2.weight'.format(key_prefix) + if out_key in state_dict: + out_channels = state_dict[out_key].shape[0] + else: + out_channels = 4 + + num_res_blocks = [] + channel_mult = [] + attention_resolutions = [] + transformer_depth = [] + transformer_depth_output = [] + context_dim = None + use_linear_in_transformer = False + + video_model = False + + current_res = 1 + count = 0 + + last_res_blocks = 0 + last_channel_mult = 0 + + input_block_count = count_blocks(state_dict_keys, '{}input_blocks'.format(key_prefix) + '.{}.') + for count in range(input_block_count): + prefix = '{}input_blocks.{}.'.format(key_prefix, count) + prefix_output = '{}output_blocks.{}.'.format(key_prefix, input_block_count - count - 1) + + block_keys = sorted(list(filter(lambda a: a.startswith(prefix), state_dict_keys))) + if len(block_keys) == 0: + break + + block_keys_output = sorted(list(filter(lambda a: a.startswith(prefix_output), state_dict_keys))) + + if "{}0.op.weight".format(prefix) in block_keys: #new layer + num_res_blocks.append(last_res_blocks) + channel_mult.append(last_channel_mult) + + current_res *= 2 + last_res_blocks = 0 + last_channel_mult = 0 + out = calculate_transformer_depth(prefix_output, state_dict_keys, state_dict) + if out is not None: + transformer_depth_output.append(out[0]) + else: + transformer_depth_output.append(0) + else: + res_block_prefix = "{}0.in_layers.0.weight".format(prefix) + if res_block_prefix in block_keys: + last_res_blocks += 1 + last_channel_mult = state_dict["{}0.out_layers.3.weight".format(prefix)].shape[0] // model_channels + + out = calculate_transformer_depth(prefix, state_dict_keys, state_dict) + if out is not None: + transformer_depth.append(out[0]) + if context_dim is None: + context_dim = out[1] + use_linear_in_transformer = 
out[2] + video_model = out[3] + else: + transformer_depth.append(0) + + res_block_prefix = "{}0.in_layers.0.weight".format(prefix_output) + if res_block_prefix in block_keys_output: + out = calculate_transformer_depth(prefix_output, state_dict_keys, state_dict) + if out is not None: + transformer_depth_output.append(out[0]) + else: + transformer_depth_output.append(0) + + + num_res_blocks.append(last_res_blocks) + channel_mult.append(last_channel_mult) + if "{}middle_block.1.proj_in.weight".format(key_prefix) in state_dict_keys: + transformer_depth_middle = count_blocks(state_dict_keys, '{}middle_block.1.transformer_blocks.'.format(key_prefix) + '{}') + else: + transformer_depth_middle = -1 + + unet_config["in_channels"] = in_channels + unet_config["out_channels"] = out_channels + unet_config["model_channels"] = model_channels + unet_config["num_res_blocks"] = num_res_blocks + unet_config["transformer_depth"] = transformer_depth + unet_config["transformer_depth_output"] = transformer_depth_output + unet_config["channel_mult"] = channel_mult + unet_config["transformer_depth_middle"] = transformer_depth_middle + unet_config['use_linear_in_transformer'] = use_linear_in_transformer + unet_config["context_dim"] = context_dim + + if video_model: + unet_config["extra_ff_mix_layer"] = True + unet_config["use_spatial_context"] = True + unet_config["merge_strategy"] = "learned_with_images" + unet_config["merge_factor"] = 0.0 + unet_config["video_kernel_size"] = [3, 1, 1] + unet_config["use_temporal_resblock"] = True + unet_config["use_temporal_attention"] = True + else: + unet_config["use_temporal_resblock"] = False + unet_config["use_temporal_attention"] = False + + return unet_config + +def model_config_from_unet_config(unet_config): + for model_config in ldm_patched.modules.supported_models.models: + if model_config.matches(unet_config): + return model_config(unet_config) + + print("no match", unet_config) + return None + +def model_config_from_unet(state_dict, unet_key_prefix, dtype, use_base_if_no_match=False): + unet_config = detect_unet_config(state_dict, unet_key_prefix, dtype) + model_config = model_config_from_unet_config(unet_config) + if model_config is None and use_base_if_no_match: + return ldm_patched.modules.supported_models_base.BASE(unet_config) + else: + return model_config + +def convert_config(unet_config): + new_config = unet_config.copy() + num_res_blocks = new_config.get("num_res_blocks", None) + channel_mult = new_config.get("channel_mult", None) + + if isinstance(num_res_blocks, int): + num_res_blocks = len(channel_mult) * [num_res_blocks] + + if "attention_resolutions" in new_config: + attention_resolutions = new_config.pop("attention_resolutions") + transformer_depth = new_config.get("transformer_depth", None) + transformer_depth_middle = new_config.get("transformer_depth_middle", None) + + if isinstance(transformer_depth, int): + transformer_depth = len(channel_mult) * [transformer_depth] + if transformer_depth_middle is None: + transformer_depth_middle = transformer_depth[-1] + t_in = [] + t_out = [] + s = 1 + for i in range(len(num_res_blocks)): + res = num_res_blocks[i] + d = 0 + if s in attention_resolutions: + d = transformer_depth[i] + + t_in += [d] * res + t_out += [d] * (res + 1) + s *= 2 + transformer_depth = t_in + transformer_depth_output = t_out + new_config["transformer_depth"] = t_in + new_config["transformer_depth_output"] = t_out + new_config["transformer_depth_middle"] = transformer_depth_middle + + new_config["num_res_blocks"] = num_res_blocks + return 
new_config + + +def unet_config_from_diffusers_unet(state_dict, dtype): + match = {} + transformer_depth = [] + + attn_res = 1 + down_blocks = count_blocks(state_dict, "down_blocks.{}") + for i in range(down_blocks): + attn_blocks = count_blocks(state_dict, "down_blocks.{}.attentions.".format(i) + '{}') + for ab in range(attn_blocks): + transformer_count = count_blocks(state_dict, "down_blocks.{}.attentions.{}.transformer_blocks.".format(i, ab) + '{}') + transformer_depth.append(transformer_count) + if transformer_count > 0: + match["context_dim"] = state_dict["down_blocks.{}.attentions.{}.transformer_blocks.0.attn2.to_k.weight".format(i, ab)].shape[1] + + attn_res *= 2 + if attn_blocks == 0: + transformer_depth.append(0) + transformer_depth.append(0) + + match["transformer_depth"] = transformer_depth + + match["model_channels"] = state_dict["conv_in.weight"].shape[0] + match["in_channels"] = state_dict["conv_in.weight"].shape[1] + match["adm_in_channels"] = None + if "class_embedding.linear_1.weight" in state_dict: + match["adm_in_channels"] = state_dict["class_embedding.linear_1.weight"].shape[1] + elif "add_embedding.linear_1.weight" in state_dict: + match["adm_in_channels"] = state_dict["add_embedding.linear_1.weight"].shape[1] + + SDXL = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 10, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SDXL_refiner = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2560, 'dtype': dtype, 'in_channels': 4, 'model_channels': 384, + 'num_res_blocks': [2, 2, 2, 2], 'transformer_depth': [0, 0, 4, 4, 4, 4, 0, 0], 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 4, + 'use_linear_in_transformer': True, 'context_dim': 1280, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 4, 4, 4, 4, 4, 4, 0, 0, 0], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SD21 = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'adm_in_channels': None, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [2, 2, 2, 2], + 'transformer_depth': [1, 1, 1, 1, 1, 1, 0, 0], 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 1, 'use_linear_in_transformer': True, + 'context_dim': 1024, 'num_head_channels': 64, 'transformer_depth_output': [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SD21_uncliph = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2048, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2, 2], 'transformer_depth': [1, 1, 1, 1, 1, 1, 0, 0], 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 1, + 'use_linear_in_transformer': True, 'context_dim': 1024, 'num_head_channels': 64, 'transformer_depth_output': [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], + 
'use_temporal_attention': False, 'use_temporal_resblock': False} + + SD21_unclipl = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 1536, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2, 2], 'transformer_depth': [1, 1, 1, 1, 1, 1, 0, 0], 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 1, + 'use_linear_in_transformer': True, 'context_dim': 1024, 'num_head_channels': 64, 'transformer_depth_output': [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SD15 = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'adm_in_channels': None, + 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [2, 2, 2, 2], 'transformer_depth': [1, 1, 1, 1, 1, 1, 0, 0], + 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 1, 'use_linear_in_transformer': False, 'context_dim': 768, 'num_heads': 8, + 'transformer_depth_output': [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SDXL_mid_cnet = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 0, 0, 1, 1], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 1, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 0, 0, 0, 1, 1, 1], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SDXL_small_cnet = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 0, 0, 0, 0], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 0, + 'use_linear_in_transformer': True, 'num_head_channels': 64, 'context_dim': 1, 'transformer_depth_output': [0, 0, 0, 0, 0, 0, 0, 0, 0], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SDXL_diffusers_inpaint = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 9, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 10, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SSD_1B = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 4, 4], 'transformer_depth_output': [0, 0, 0, 1, 1, 2, 10, 4, 4], + 'channel_mult': [1, 2, 4], 'transformer_depth_middle': -1, 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, + 'use_temporal_attention': False, 
'use_temporal_resblock': False} + + Segmind_Vega = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 1, 1, 2, 2], 'transformer_depth_output': [0, 0, 0, 1, 1, 1, 2, 2, 2], + 'channel_mult': [1, 2, 4], 'transformer_depth_middle': -1, 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + supported_models = [SDXL, SDXL_refiner, SD21, SD15, SD21_uncliph, SD21_unclipl, SDXL_mid_cnet, SDXL_small_cnet, SDXL_diffusers_inpaint, SSD_1B, Segmind_Vega] + + for unet_config in supported_models: + matches = True + for k in match: + if match[k] != unet_config[k]: + matches = False + break + if matches: + return convert_config(unet_config) + return None + +def model_config_from_diffusers_unet(state_dict, dtype): + unet_config = unet_config_from_diffusers_unet(state_dict, dtype) + if unet_config is not None: + return model_config_from_unet_config(unet_config) + return None diff --git a/ldm_patched/modules/model_management.py b/ldm_patched/modules/model_management.py new file mode 100644 index 0000000000000000000000000000000000000000..840d79a07b607818ddc8596a9128ca6b7b0c2f2b --- /dev/null +++ b/ldm_patched/modules/model_management.py @@ -0,0 +1,807 @@ +import psutil +from enum import Enum +from ldm_patched.modules.args_parser import args +import ldm_patched.modules.utils +import torch +import sys + +class VRAMState(Enum): + DISABLED = 0 #No vram present: no need to move models to vram + NO_VRAM = 1 #Very low vram: enable all the options to save vram + LOW_VRAM = 2 + NORMAL_VRAM = 3 + HIGH_VRAM = 4 + SHARED = 5 #No dedicated vram: memory shared between CPU and GPU but models still need to be moved between both. + +class CPUState(Enum): + GPU = 0 + CPU = 1 + MPS = 2 + +# Determine VRAM State +vram_state = VRAMState.NORMAL_VRAM +set_vram_to = VRAMState.NORMAL_VRAM +cpu_state = CPUState.GPU + +total_vram = 0 + +lowvram_available = True +xpu_available = False + +if args.pytorch_deterministic: + print("Using deterministic algorithms for pytorch") + torch.use_deterministic_algorithms(True, warn_only=True) + +directml_enabled = False +if args.directml is not None: + import torch_directml + directml_enabled = True + device_index = args.directml + if device_index < 0: + directml_device = torch_directml.device() + else: + directml_device = torch_directml.device(device_index) + print("Using directml with device:", torch_directml.device_name(device_index)) + # torch_directml.disable_tiled_resources(True) + lowvram_available = False #TODO: need to find a way to get free memory in directml before this can be enabled by default. 
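To keep the initialization below readable (editor's sketch, with assumptions flagged in comments): at import time the module probes the runtime once, picks a torch device, measures total memory, and folds the result plus the CLI flags into a VRAMState. Compressed, the decision looks roughly like this:

    from enum import Enum

    class VRAMState(Enum):
        DISABLED = 0     # CPU-only: nothing to manage
        LOW_VRAM = 2
        NORMAL_VRAM = 3
        SHARED = 5       # e.g. MPS: CPU and GPU share memory

    def pick_vram_state(total_vram_mb, on_cpu=False, on_mps=False):
        # Mirrors the real flow below: CPU disables VRAM handling, MPS shares
        # memory, and <= 4096 MB of VRAM triggers the low-VRAM heuristic
        # (overridable with --always-normal-vram and the other flags).
        if on_cpu:
            return VRAMState.DISABLED
        if on_mps:
            return VRAMState.SHARED
        return VRAMState.LOW_VRAM if total_vram_mb <= 4096 else VRAMState.NORMAL_VRAM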
+ +try: + import intel_extension_for_pytorch as ipex + if torch.xpu.is_available(): + xpu_available = True +except: + pass + +try: + if torch.backends.mps.is_available(): + cpu_state = CPUState.MPS + import torch.mps +except: + pass + +if args.always_cpu: + if args.always_cpu > 0: + torch.set_num_threads(args.always_cpu) + print(f"Running on {torch.get_num_threads()} CPU threads") + cpu_state = CPUState.CPU + +def is_intel_xpu(): + global cpu_state + global xpu_available + if cpu_state == CPUState.GPU: + if xpu_available: + return True + return False + +def get_torch_device(): + global directml_enabled + global cpu_state + if directml_enabled: + global directml_device + return directml_device + if cpu_state == CPUState.MPS: + return torch.device("mps") + if cpu_state == CPUState.CPU: + return torch.device("cpu") + else: + if is_intel_xpu(): + return torch.device("xpu") + else: + return torch.device(torch.cuda.current_device()) + +def get_total_memory(dev=None, torch_total_too=False): + global directml_enabled + if dev is None: + dev = get_torch_device() + + if hasattr(dev, 'type') and (dev.type == 'cpu' or dev.type == 'mps'): + mem_total = psutil.virtual_memory().total + mem_total_torch = mem_total + else: + if directml_enabled: + mem_total = 1024 * 1024 * 1024 #TODO + mem_total_torch = mem_total + elif is_intel_xpu(): + stats = torch.xpu.memory_stats(dev) + mem_reserved = stats['reserved_bytes.all.current'] + mem_total = torch.xpu.get_device_properties(dev).total_memory + mem_total_torch = mem_reserved + else: + stats = torch.cuda.memory_stats(dev) + mem_reserved = stats['reserved_bytes.all.current'] + _, mem_total_cuda = torch.cuda.mem_get_info(dev) + mem_total_torch = mem_reserved + mem_total = mem_total_cuda + + if torch_total_too: + return (mem_total, mem_total_torch) + else: + return mem_total + +total_vram = get_total_memory(get_torch_device()) / (1024 * 1024) +total_ram = psutil.virtual_memory().total / (1024 * 1024) +print("Total VRAM {:0.0f} MB, total RAM {:0.0f} MB".format(total_vram, total_ram)) +if not args.always_normal_vram and not args.always_cpu: + if lowvram_available and total_vram <= 4096: + print("Trying to enable lowvram mode because your GPU seems to have 4GB or less. 
If you don't want this use: --always-normal-vram") + set_vram_to = VRAMState.LOW_VRAM + +try: + OOM_EXCEPTION = torch.cuda.OutOfMemoryError +except: + OOM_EXCEPTION = Exception + +XFORMERS_VERSION = "" +XFORMERS_ENABLED_VAE = True +if args.disable_xformers: + XFORMERS_IS_AVAILABLE = False +else: + try: + import xformers + import xformers.ops + XFORMERS_IS_AVAILABLE = True + try: + XFORMERS_IS_AVAILABLE = xformers._has_cpp_library + except: + pass + try: + XFORMERS_VERSION = xformers.version.__version__ + print("xformers version:", XFORMERS_VERSION) + if XFORMERS_VERSION.startswith("0.0.18"): + print() + print("WARNING: This version of xformers has a major bug where you will get black images when generating high resolution images.") + print("Please downgrade or upgrade xformers to a different version.") + print() + XFORMERS_ENABLED_VAE = False + except: + pass + except: + XFORMERS_IS_AVAILABLE = False + +def is_nvidia(): + global cpu_state + if cpu_state == CPUState.GPU: + if torch.version.cuda: + return True + return False + +ENABLE_PYTORCH_ATTENTION = False +if args.attention_pytorch: + ENABLE_PYTORCH_ATTENTION = True + XFORMERS_IS_AVAILABLE = False + +VAE_DTYPE = torch.float32 + +try: + if is_nvidia(): + torch_version = torch.version.__version__ + if int(torch_version[0]) >= 2: + if ENABLE_PYTORCH_ATTENTION == False and args.attention_split == False and args.attention_quad == False: + ENABLE_PYTORCH_ATTENTION = True + if torch.cuda.is_bf16_supported() and torch.cuda.get_device_properties(torch.cuda.current_device()).major >= 8: + VAE_DTYPE = torch.bfloat16 + if is_intel_xpu(): + if args.attention_split == False and args.attention_quad == False: + ENABLE_PYTORCH_ATTENTION = True +except: + pass + +if is_intel_xpu(): + VAE_DTYPE = torch.bfloat16 + +if args.vae_in_cpu: + VAE_DTYPE = torch.float32 + +if args.vae_in_fp16: + VAE_DTYPE = torch.float16 +elif args.vae_in_bf16: + VAE_DTYPE = torch.bfloat16 +elif args.vae_in_fp32: + VAE_DTYPE = torch.float32 + + +if ENABLE_PYTORCH_ATTENTION: + torch.backends.cuda.enable_math_sdp(True) + torch.backends.cuda.enable_flash_sdp(True) + torch.backends.cuda.enable_mem_efficient_sdp(True) + +if args.always_low_vram: + set_vram_to = VRAMState.LOW_VRAM + lowvram_available = True +elif args.always_no_vram: + set_vram_to = VRAMState.NO_VRAM +elif args.always_high_vram or args.always_gpu: + vram_state = VRAMState.HIGH_VRAM + +FORCE_FP32 = False +FORCE_FP16 = False +if args.all_in_fp32: + print("Forcing FP32, if this improves things please report it.") + FORCE_FP32 = True + +if args.all_in_fp16: + print("Forcing FP16.") + FORCE_FP16 = True + +if lowvram_available: + if set_vram_to in (VRAMState.LOW_VRAM, VRAMState.NO_VRAM): + vram_state = set_vram_to + + +if cpu_state != CPUState.GPU: + vram_state = VRAMState.DISABLED + +if cpu_state == CPUState.MPS: + vram_state = VRAMState.SHARED + +print(f"Set vram state to: {vram_state.name}") + +ALWAYS_VRAM_OFFLOAD = args.always_offload_from_vram + +if ALWAYS_VRAM_OFFLOAD: + print("Always offload VRAM") + +def get_torch_device_name(device): + if hasattr(device, 'type'): + if device.type == "cuda": + try: + allocator_backend = torch.cuda.get_allocator_backend() + except: + allocator_backend = "" + return "{} {} : {}".format(device, torch.cuda.get_device_name(device), allocator_backend) + else: + return "{}".format(device.type) + elif is_intel_xpu(): + return "{} {}".format(device, torch.xpu.get_device_name(device)) + else: + return "CUDA {}: {}".format(device, torch.cuda.get_device_name(device)) + +try: + print("Device:", 
get_torch_device_name(get_torch_device())) +except: + print("Could not pick default device.") + +print("VAE dtype:", VAE_DTYPE) + +current_loaded_models = [] + +def module_size(module): + module_mem = 0 + sd = module.state_dict() + for k in sd: + t = sd[k] + module_mem += t.nelement() * t.element_size() + return module_mem + +class LoadedModel: + def __init__(self, model): + self.model = model + self.model_accelerated = False + self.device = model.load_device + + def model_memory(self): + return self.model.model_size() + + def model_memory_required(self, device): + if device == self.model.current_device: + return 0 + else: + return self.model_memory() + + def model_load(self, lowvram_model_memory=0): + patch_model_to = None + if lowvram_model_memory == 0: + patch_model_to = self.device + + self.model.model_patches_to(self.device) + self.model.model_patches_to(self.model.model_dtype()) + + try: + self.real_model = self.model.patch_model(device_to=patch_model_to) #TODO: do something with loras and offloading to CPU + except Exception as e: + self.model.unpatch_model(self.model.offload_device) + self.model_unload() + raise e + + if lowvram_model_memory > 0: + print("loading in lowvram mode", lowvram_model_memory/(1024 * 1024)) + mem_counter = 0 + for m in self.real_model.modules(): + if hasattr(m, "ldm_patched_cast_weights"): + m.prev_ldm_patched_cast_weights = m.ldm_patched_cast_weights + m.ldm_patched_cast_weights = True + module_mem = module_size(m) + if mem_counter + module_mem < lowvram_model_memory: + m.to(self.device) + mem_counter += module_mem + elif hasattr(m, "weight"): #only modules with ldm_patched_cast_weights can be set to lowvram mode + m.to(self.device) + mem_counter += module_size(m) + print("lowvram: loaded module regularly", m) + + self.model_accelerated = True + + if is_intel_xpu() and not args.disable_ipex_hijack: + self.real_model = torch.xpu.optimize(self.real_model.eval(), inplace=True, auto_kernel_selection=True, graph_mode=True) + + return self.real_model + + def model_unload(self): + if self.model_accelerated: + for m in self.real_model.modules(): + if hasattr(m, "prev_ldm_patched_cast_weights"): + m.ldm_patched_cast_weights = m.prev_ldm_patched_cast_weights + del m.prev_ldm_patched_cast_weights + + self.model_accelerated = False + + self.model.unpatch_model(self.model.offload_device) + self.model.model_patches_to(self.model.offload_device) + + def __eq__(self, other): + return self.model is other.model + +def minimum_inference_memory(): + return (1024 * 1024 * 1024) + +def unload_model_clones(model): + to_unload = [] + for i in range(len(current_loaded_models)): + if model.is_clone(current_loaded_models[i].model): + to_unload = [i] + to_unload + + for i in to_unload: + print("unload clone", i) + current_loaded_models.pop(i).model_unload() + +def free_memory(memory_required, device, keep_loaded=[]): + unloaded_model = False + for i in range(len(current_loaded_models) -1, -1, -1): + if not ALWAYS_VRAM_OFFLOAD: + if get_free_memory(device) > memory_required: + break + shift_model = current_loaded_models[i] + if shift_model.device == device: + if shift_model not in keep_loaded: + m = current_loaded_models.pop(i) + m.model_unload() + del m + unloaded_model = True + + if unloaded_model: + soft_empty_cache() + else: + if vram_state != VRAMState.HIGH_VRAM: + mem_free_total, mem_free_torch = get_free_memory(device, torch_free_too=True) + if mem_free_torch > mem_free_total * 0.25: + soft_empty_cache() + +def load_models_gpu(models, memory_required=0): + global vram_state + 
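+    # Budget: always keep at least minimum_inference_memory() (1 GiB) free for
+    # the actual sampling pass; free_memory() below evicts models from the end
+    # of current_loaded_models (the least recently used, since fresh loads are
+    # inserted at index 0) until that headroom exists on the target device.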
+ inference_memory = minimum_inference_memory() + extra_mem = max(inference_memory, memory_required) + + models_to_load = [] + models_already_loaded = [] + for x in models: + loaded_model = LoadedModel(x) + + if loaded_model in current_loaded_models: + index = current_loaded_models.index(loaded_model) + current_loaded_models.insert(0, current_loaded_models.pop(index)) + models_already_loaded.append(loaded_model) + else: + if hasattr(x, "model"): + print(f"Requested to load {x.model.__class__.__name__}") + models_to_load.append(loaded_model) + + if len(models_to_load) == 0: + devs = set(map(lambda a: a.device, models_already_loaded)) + for d in devs: + if d != torch.device("cpu"): + free_memory(extra_mem, d, models_already_loaded) + return + + print(f"Loading {len(models_to_load)} new model{'s' if len(models_to_load) > 1 else ''}") + + total_memory_required = {} + for loaded_model in models_to_load: + unload_model_clones(loaded_model.model) + total_memory_required[loaded_model.device] = total_memory_required.get(loaded_model.device, 0) + loaded_model.model_memory_required(loaded_model.device) + + for device in total_memory_required: + if device != torch.device("cpu"): + free_memory(total_memory_required[device] * 1.3 + extra_mem, device, models_already_loaded) + + for loaded_model in models_to_load: + model = loaded_model.model + torch_dev = model.load_device + if is_device_cpu(torch_dev): + vram_set_state = VRAMState.DISABLED + else: + vram_set_state = vram_state + lowvram_model_memory = 0 + if lowvram_available and (vram_set_state == VRAMState.LOW_VRAM or vram_set_state == VRAMState.NORMAL_VRAM): + model_size = loaded_model.model_memory_required(torch_dev) + current_free_mem = get_free_memory(torch_dev) + lowvram_model_memory = int(max(64 * (1024 * 1024), (current_free_mem - 1024 * (1024 * 1024)) / 1.3 )) + if model_size > (current_free_mem - inference_memory): #only switch to lowvram if really necessary + vram_set_state = VRAMState.LOW_VRAM + else: + lowvram_model_memory = 0 + + if vram_set_state == VRAMState.NO_VRAM: + lowvram_model_memory = 64 * 1024 * 1024 + + cur_loaded_model = loaded_model.model_load(lowvram_model_memory) + current_loaded_models.insert(0, loaded_model) + return + + +def load_model_gpu(model): + return load_models_gpu([model]) + +def cleanup_models(): + to_delete = [] + for i in range(len(current_loaded_models)): + if sys.getrefcount(current_loaded_models[i].model) <= 2: + to_delete = [i] + to_delete + + for i in to_delete: + x = current_loaded_models.pop(i) + x.model_unload() + del x + +def dtype_size(dtype): + dtype_size = 4 + if dtype == torch.float16 or dtype == torch.bfloat16: + dtype_size = 2 + elif dtype == torch.float32: + dtype_size = 4 + else: + try: + dtype_size = dtype.itemsize + except: #Old pytorch doesn't have .itemsize + pass + return dtype_size + +def unet_offload_device(): + if vram_state == VRAMState.HIGH_VRAM: + return get_torch_device() + else: + return torch.device("cpu") + +def unet_inital_load_device(parameters, dtype): + torch_dev = get_torch_device() + if vram_state == VRAMState.HIGH_VRAM: + return torch_dev + + cpu_dev = torch.device("cpu") + if ALWAYS_VRAM_OFFLOAD: + return cpu_dev + + model_size = dtype_size(dtype) * parameters + + mem_dev = get_free_memory(torch_dev) + mem_cpu = get_free_memory(cpu_dev) + if mem_dev > mem_cpu and model_size < mem_dev: + return torch_dev + else: + return cpu_dev + +def unet_dtype(device=None, model_params=0): + if args.unet_in_bf16: + return torch.bfloat16 + if args.unet_in_fp16: + return torch.float16 + 
if args.unet_in_fp8_e4m3fn: + return torch.float8_e4m3fn + if args.unet_in_fp8_e5m2: + return torch.float8_e5m2 + if should_use_fp16(device=device, model_params=model_params): + return torch.float16 + return torch.float32 + +# None means no manual cast +def unet_manual_cast(weight_dtype, inference_device): + if weight_dtype == torch.float32: + return None + + fp16_supported = ldm_patched.modules.model_management.should_use_fp16(inference_device, prioritize_performance=False) + if fp16_supported and weight_dtype == torch.float16: + return None + + if fp16_supported: + return torch.float16 + else: + return torch.float32 + +def text_encoder_offload_device(): + if args.always_gpu: + return get_torch_device() + else: + return torch.device("cpu") + +def text_encoder_device(): + if args.always_gpu: + return get_torch_device() + elif vram_state == VRAMState.HIGH_VRAM or vram_state == VRAMState.NORMAL_VRAM: + if is_intel_xpu(): + return torch.device("cpu") + if should_use_fp16(prioritize_performance=False): + return get_torch_device() + else: + return torch.device("cpu") + else: + return torch.device("cpu") + +def text_encoder_dtype(device=None): + if args.clip_in_fp8_e4m3fn: + return torch.float8_e4m3fn + elif args.clip_in_fp8_e5m2: + return torch.float8_e5m2 + elif args.clip_in_fp16: + return torch.float16 + elif args.clip_in_fp32: + return torch.float32 + + if is_device_cpu(device): + return torch.float16 + + if should_use_fp16(device, prioritize_performance=False): + return torch.float16 + else: + return torch.float32 + +def intermediate_device(): + if args.always_gpu: + return get_torch_device() + else: + return torch.device("cpu") + +def vae_device(): + if args.vae_in_cpu: + return torch.device("cpu") + return get_torch_device() + +def vae_offload_device(): + if args.always_gpu: + return get_torch_device() + else: + return torch.device("cpu") + +def vae_dtype(): + global VAE_DTYPE + return VAE_DTYPE + +def get_autocast_device(dev): + if hasattr(dev, 'type'): + return dev.type + return "cuda" + +def supports_dtype(device, dtype): #TODO + if dtype == torch.float32: + return True + if is_device_cpu(device): + return False + if dtype == torch.float16: + return True + if dtype == torch.bfloat16: + return True + return False + +def device_supports_non_blocking(device): + if is_device_mps(device): + return False #pytorch bug? 
mps doesn't support non blocking + return True + +def cast_to_device(tensor, device, dtype, copy=False): + device_supports_cast = False + if tensor.dtype == torch.float32 or tensor.dtype == torch.float16: + device_supports_cast = True + elif tensor.dtype == torch.bfloat16: + if hasattr(device, 'type') and device.type.startswith("cuda"): + device_supports_cast = True + elif is_intel_xpu(): + device_supports_cast = True + + non_blocking = device_supports_non_blocking(device) + + if device_supports_cast: + if copy: + if tensor.device == device: + return tensor.to(dtype, copy=copy, non_blocking=non_blocking) + return tensor.to(device, copy=copy, non_blocking=non_blocking).to(dtype, non_blocking=non_blocking) + else: + return tensor.to(device, non_blocking=non_blocking).to(dtype, non_blocking=non_blocking) + else: + return tensor.to(device, dtype, copy=copy, non_blocking=non_blocking) + +def xformers_enabled(): + global directml_enabled + global cpu_state + if cpu_state != CPUState.GPU: + return False + if is_intel_xpu(): + return False + if directml_enabled: + return False + return XFORMERS_IS_AVAILABLE + + +def xformers_enabled_vae(): + enabled = xformers_enabled() + if not enabled: + return False + + return XFORMERS_ENABLED_VAE + +def pytorch_attention_enabled(): + global ENABLE_PYTORCH_ATTENTION + return ENABLE_PYTORCH_ATTENTION + +def pytorch_attention_flash_attention(): + global ENABLE_PYTORCH_ATTENTION + if ENABLE_PYTORCH_ATTENTION: + #TODO: more reliable way of checking for flash attention? + if is_nvidia(): #pytorch flash attention only works on Nvidia + return True + return False + +def get_free_memory(dev=None, torch_free_too=False): + global directml_enabled + if dev is None: + dev = get_torch_device() + + if hasattr(dev, 'type') and (dev.type == 'cpu' or dev.type == 'mps'): + mem_free_total = psutil.virtual_memory().available + mem_free_torch = mem_free_total + else: + if directml_enabled: + mem_free_total = 1024 * 1024 * 1024 #TODO + mem_free_torch = mem_free_total + elif is_intel_xpu(): + stats = torch.xpu.memory_stats(dev) + mem_active = stats['active_bytes.all.current'] + mem_allocated = stats['allocated_bytes.all.current'] + mem_reserved = stats['reserved_bytes.all.current'] + mem_free_torch = mem_reserved - mem_active + mem_free_total = torch.xpu.get_device_properties(dev).total_memory - mem_allocated + else: + stats = torch.cuda.memory_stats(dev) + mem_active = stats['active_bytes.all.current'] + mem_reserved = stats['reserved_bytes.all.current'] + mem_free_cuda, _ = torch.cuda.mem_get_info(dev) + mem_free_torch = mem_reserved - mem_active + mem_free_total = mem_free_cuda + mem_free_torch + + if torch_free_too: + return (mem_free_total, mem_free_torch) + else: + return mem_free_total + +def cpu_mode(): + global cpu_state + return cpu_state == CPUState.CPU + +def mps_mode(): + global cpu_state + return cpu_state == CPUState.MPS + +def is_device_cpu(device): + if hasattr(device, 'type'): + if (device.type == 'cpu'): + return True + return False + +def is_device_mps(device): + if hasattr(device, 'type'): + if (device.type == 'mps'): + return True + return False + +def should_use_fp16(device=None, model_params=0, prioritize_performance=True): + global directml_enabled + + if device is not None: + if is_device_cpu(device): + return False + + if FORCE_FP16: + return True + + if device is not None: #TODO + if is_device_mps(device): + return False + + if FORCE_FP32: + return False + + if directml_enabled: + return False + + if cpu_mode() or mps_mode(): + return False #TODO ? 
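+    # From here on a real GPU is assumed: Intel XPU and bf16-capable NVIDIA
+    # cards get fp16 unconditionally; older cards fall through to the
+    # compute-capability and product-name heuristics below (the 10-series runs
+    # fp16 but slower than fp32, and fp16 is simply broken on the 16-series).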
+ + if is_intel_xpu(): + return True + + if torch.cuda.is_bf16_supported(): + return True + + props = torch.cuda.get_device_properties("cuda") + if props.major < 6: + return False + + fp16_works = False + #FP16 is confirmed working on a 1080 (GP104) but it's a bit slower than FP32 so it should only be enabled + #when the model doesn't actually fit on the card + #TODO: actually test if GP106 and others have the same type of behavior + nvidia_10_series = ["1080", "1070", "titan x", "p3000", "p3200", "p4000", "p4200", "p5000", "p5200", "p6000", "1060", "1050"] + for x in nvidia_10_series: + if x in props.name.lower(): + fp16_works = True + + if fp16_works: + free_model_memory = (get_free_memory() * 0.9 - minimum_inference_memory()) + if (not prioritize_performance) or model_params * 4 > free_model_memory: + return True + + if props.major < 7: + return False + + #FP16 is just broken on these cards + nvidia_16_series = ["1660", "1650", "1630", "T500", "T550", "T600", "MX550", "MX450", "CMP 30HX", "T2000", "T1000", "T1200"] + for x in nvidia_16_series: + if x in props.name: + return False + + return True + +def soft_empty_cache(force=False): + global cpu_state + if cpu_state == CPUState.MPS: + torch.mps.empty_cache() + elif is_intel_xpu(): + torch.xpu.empty_cache() + elif torch.cuda.is_available(): + if force or is_nvidia(): #This seems to make things worse on ROCm so I only do it for cuda + torch.cuda.empty_cache() + torch.cuda.ipc_collect() + +def unload_all_models(): + free_memory(1e30, get_torch_device()) + + +def resolve_lowvram_weight(weight, model, key): #TODO: remove + return weight + +#TODO: might be cleaner to put this somewhere else +import threading + +class InterruptProcessingException(Exception): + pass + +interrupt_processing_mutex = threading.RLock() + +interrupt_processing = False +def interrupt_current_processing(value=True): + global interrupt_processing + global interrupt_processing_mutex + with interrupt_processing_mutex: + interrupt_processing = value + +def processing_interrupted(): + global interrupt_processing + global interrupt_processing_mutex + with interrupt_processing_mutex: + return interrupt_processing + +def throw_exception_if_processing_interrupted(): + global interrupt_processing + global interrupt_processing_mutex + with interrupt_processing_mutex: + if interrupt_processing: + interrupt_processing = False + raise InterruptProcessingException() diff --git a/ldm_patched/modules/model_patcher.py b/ldm_patched/modules/model_patcher.py new file mode 100644 index 0000000000000000000000000000000000000000..dd816e52e1ee05c2c279af0823e91b71f2567fb9 --- /dev/null +++ b/ldm_patched/modules/model_patcher.py @@ -0,0 +1,357 @@ +import torch +import copy +import inspect + +import ldm_patched.modules.utils +import ldm_patched.modules.model_management + +class ModelPatcher: + def __init__(self, model, load_device, offload_device, size=0, current_device=None, weight_inplace_update=False): + self.size = size + self.model = model + self.patches = {} + self.backup = {} + self.object_patches = {} + self.object_patches_backup = {} + self.model_options = {"transformer_options":{}} + self.model_size() + self.load_device = load_device + self.offload_device = offload_device + if current_device is None: + self.current_device = self.offload_device + else: + self.current_device = current_device + + self.weight_inplace_update = weight_inplace_update + + def model_size(self): + if self.size > 0: + return self.size + model_sd = self.model.state_dict() + self.size = 
ldm_patched.modules.model_management.module_size(self.model) + self.model_keys = set(model_sd.keys()) + return self.size + + def clone(self): + n = ModelPatcher(self.model, self.load_device, self.offload_device, self.size, self.current_device, weight_inplace_update=self.weight_inplace_update) + n.patches = {} + for k in self.patches: + n.patches[k] = self.patches[k][:] + + n.object_patches = self.object_patches.copy() + n.model_options = copy.deepcopy(self.model_options) + n.model_keys = self.model_keys + return n + + def is_clone(self, other): + if hasattr(other, 'model') and self.model is other.model: + return True + return False + + def memory_required(self, input_shape): + return self.model.memory_required(input_shape=input_shape) + + def set_model_sampler_cfg_function(self, sampler_cfg_function, disable_cfg1_optimization=False): + if len(inspect.signature(sampler_cfg_function).parameters) == 3: + self.model_options["sampler_cfg_function"] = lambda args: sampler_cfg_function(args["cond"], args["uncond"], args["cond_scale"]) #Old way + else: + self.model_options["sampler_cfg_function"] = sampler_cfg_function + if disable_cfg1_optimization: + self.model_options["disable_cfg1_optimization"] = True + + def set_model_sampler_post_cfg_function(self, post_cfg_function, disable_cfg1_optimization=False): + self.model_options["sampler_post_cfg_function"] = self.model_options.get("sampler_post_cfg_function", []) + [post_cfg_function] + if disable_cfg1_optimization: + self.model_options["disable_cfg1_optimization"] = True + + def set_model_unet_function_wrapper(self, unet_wrapper_function): + self.model_options["model_function_wrapper"] = unet_wrapper_function + + def set_model_patch(self, patch, name): + to = self.model_options["transformer_options"] + if "patches" not in to: + to["patches"] = {} + to["patches"][name] = to["patches"].get(name, []) + [patch] + + def set_model_patch_replace(self, patch, name, block_name, number, transformer_index=None): + to = self.model_options["transformer_options"] + if "patches_replace" not in to: + to["patches_replace"] = {} + if name not in to["patches_replace"]: + to["patches_replace"][name] = {} + if transformer_index is not None: + block = (block_name, number, transformer_index) + else: + block = (block_name, number) + to["patches_replace"][name][block] = patch + + def set_model_attn1_patch(self, patch): + self.set_model_patch(patch, "attn1_patch") + + def set_model_attn2_patch(self, patch): + self.set_model_patch(patch, "attn2_patch") + + def set_model_attn1_replace(self, patch, block_name, number, transformer_index=None): + self.set_model_patch_replace(patch, "attn1", block_name, number, transformer_index) + + def set_model_attn2_replace(self, patch, block_name, number, transformer_index=None): + self.set_model_patch_replace(patch, "attn2", block_name, number, transformer_index) + + def set_model_attn1_output_patch(self, patch): + self.set_model_patch(patch, "attn1_output_patch") + + def set_model_attn2_output_patch(self, patch): + self.set_model_patch(patch, "attn2_output_patch") + + def set_model_input_block_patch(self, patch): + self.set_model_patch(patch, "input_block_patch") + + def set_model_input_block_patch_after_skip(self, patch): + self.set_model_patch(patch, "input_block_patch_after_skip") + + def set_model_output_block_patch(self, patch): + self.set_model_patch(patch, "output_block_patch") + + def add_object_patch(self, name, obj): + self.object_patches[name] = obj + + def model_patches_to(self, device): + to = 
self.model_options["transformer_options"] + if "patches" in to: + patches = to["patches"] + for name in patches: + patch_list = patches[name] + for i in range(len(patch_list)): + if hasattr(patch_list[i], "to"): + patch_list[i] = patch_list[i].to(device) + if "patches_replace" in to: + patches = to["patches_replace"] + for name in patches: + patch_list = patches[name] + for k in patch_list: + if hasattr(patch_list[k], "to"): + patch_list[k] = patch_list[k].to(device) + if "model_function_wrapper" in self.model_options: + wrap_func = self.model_options["model_function_wrapper"] + if hasattr(wrap_func, "to"): + self.model_options["model_function_wrapper"] = wrap_func.to(device) + + def model_dtype(self): + if hasattr(self.model, "get_dtype"): + return self.model.get_dtype() + + def add_patches(self, patches, strength_patch=1.0, strength_model=1.0): + p = set() + for k in patches: + if k in self.model_keys: + p.add(k) + current_patches = self.patches.get(k, []) + current_patches.append((strength_patch, patches[k], strength_model)) + self.patches[k] = current_patches + + return list(p) + + def get_key_patches(self, filter_prefix=None): + ldm_patched.modules.model_management.unload_model_clones(self) + model_sd = self.model_state_dict() + p = {} + for k in model_sd: + if filter_prefix is not None: + if not k.startswith(filter_prefix): + continue + if k in self.patches: + p[k] = [model_sd[k]] + self.patches[k] + else: + p[k] = (model_sd[k],) + return p + + def model_state_dict(self, filter_prefix=None): + sd = self.model.state_dict() + keys = list(sd.keys()) + if filter_prefix is not None: + for k in keys: + if not k.startswith(filter_prefix): + sd.pop(k) + return sd + + def patch_model(self, device_to=None, patch_weights=True): + for k in self.object_patches: + old = getattr(self.model, k) + if k not in self.object_patches_backup: + self.object_patches_backup[k] = old + setattr(self.model, k, self.object_patches[k]) + + if patch_weights: + model_sd = self.model_state_dict() + for key in self.patches: + if key not in model_sd: + print("could not patch. 
key doesn't exist in model:", key) + continue + + weight = model_sd[key] + + inplace_update = self.weight_inplace_update + + if key not in self.backup: + self.backup[key] = weight.to(device=self.offload_device, copy=inplace_update) + + if device_to is not None: + temp_weight = ldm_patched.modules.model_management.cast_to_device(weight, device_to, torch.float32, copy=True) + else: + temp_weight = weight.to(torch.float32, copy=True) + out_weight = self.calculate_weight(self.patches[key], temp_weight, key).to(weight.dtype) + if inplace_update: + ldm_patched.modules.utils.copy_to_param(self.model, key, out_weight) + else: + ldm_patched.modules.utils.set_attr(self.model, key, out_weight) + del temp_weight + + if device_to is not None: + self.model.to(device_to) + self.current_device = device_to + + return self.model + + def calculate_weight(self, patches, weight, key): + for p in patches: + alpha = p[0] + v = p[1] + strength_model = p[2] + + if strength_model != 1.0: + weight *= strength_model + + if isinstance(v, list): + v = (self.calculate_weight(v[1:], v[0].clone(), key), ) + + if len(v) == 1: + patch_type = "diff" + elif len(v) == 2: + patch_type = v[0] + v = v[1] + + if patch_type == "diff": + w1 = v[0] + if alpha != 0.0: + if w1.shape != weight.shape: + print("WARNING SHAPE MISMATCH {} WEIGHT NOT MERGED {} != {}".format(key, w1.shape, weight.shape)) + else: + weight += alpha * ldm_patched.modules.model_management.cast_to_device(w1, weight.device, weight.dtype) + elif patch_type == "lora": #lora/locon + mat1 = ldm_patched.modules.model_management.cast_to_device(v[0], weight.device, torch.float32) + mat2 = ldm_patched.modules.model_management.cast_to_device(v[1], weight.device, torch.float32) + if v[2] is not None: + alpha *= v[2] / mat2.shape[0] + if v[3] is not None: + #locon mid weights, hopefully the math is fine because I didn't properly test it + mat3 = ldm_patched.modules.model_management.cast_to_device(v[3], weight.device, torch.float32) + final_shape = [mat2.shape[1], mat2.shape[0], mat3.shape[2], mat3.shape[3]] + mat2 = torch.mm(mat2.transpose(0, 1).flatten(start_dim=1), mat3.transpose(0, 1).flatten(start_dim=1)).reshape(final_shape).transpose(0, 1) + try: + weight += (alpha * torch.mm(mat1.flatten(start_dim=1), mat2.flatten(start_dim=1))).reshape(weight.shape).type(weight.dtype) + except Exception as e: + print("ERROR", key, e) + elif patch_type == "lokr": + w1 = v[0] + w2 = v[1] + w1_a = v[3] + w1_b = v[4] + w2_a = v[5] + w2_b = v[6] + t2 = v[7] + dim = None + + if w1 is None: + dim = w1_b.shape[0] + w1 = torch.mm(ldm_patched.modules.model_management.cast_to_device(w1_a, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w1_b, weight.device, torch.float32)) + else: + w1 = ldm_patched.modules.model_management.cast_to_device(w1, weight.device, torch.float32) + + if w2 is None: + dim = w2_b.shape[0] + if t2 is None: + w2 = torch.mm(ldm_patched.modules.model_management.cast_to_device(w2_a, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w2_b, weight.device, torch.float32)) + else: + w2 = torch.einsum('i j k l, j r, i p -> p r k l', + ldm_patched.modules.model_management.cast_to_device(t2, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w2_b, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w2_a, weight.device, torch.float32)) + else: + w2 = ldm_patched.modules.model_management.cast_to_device(w2, weight.device, torch.float32) + + if 
len(w2.shape) == 4: + w1 = w1.unsqueeze(2).unsqueeze(2) + if v[2] is not None and dim is not None: + alpha *= v[2] / dim + + try: + weight += alpha * torch.kron(w1, w2).reshape(weight.shape).type(weight.dtype) + except Exception as e: + print("ERROR", key, e) + elif patch_type == "loha": + w1a = v[0] + w1b = v[1] + if v[2] is not None: + alpha *= v[2] / w1b.shape[0] + w2a = v[3] + w2b = v[4] + if v[5] is not None: #cp decomposition + t1 = v[5] + t2 = v[6] + m1 = torch.einsum('i j k l, j r, i p -> p r k l', + ldm_patched.modules.model_management.cast_to_device(t1, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w1b, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w1a, weight.device, torch.float32)) + + m2 = torch.einsum('i j k l, j r, i p -> p r k l', + ldm_patched.modules.model_management.cast_to_device(t2, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w2b, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w2a, weight.device, torch.float32)) + else: + m1 = torch.mm(ldm_patched.modules.model_management.cast_to_device(w1a, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w1b, weight.device, torch.float32)) + m2 = torch.mm(ldm_patched.modules.model_management.cast_to_device(w2a, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w2b, weight.device, torch.float32)) + + try: + weight += (alpha * m1 * m2).reshape(weight.shape).type(weight.dtype) + except Exception as e: + print("ERROR", key, e) + elif patch_type == "glora": + if v[4] is not None: + alpha *= v[4] / v[0].shape[0] + + a1 = ldm_patched.modules.model_management.cast_to_device(v[0].flatten(start_dim=1), weight.device, torch.float32) + a2 = ldm_patched.modules.model_management.cast_to_device(v[1].flatten(start_dim=1), weight.device, torch.float32) + b1 = ldm_patched.modules.model_management.cast_to_device(v[2].flatten(start_dim=1), weight.device, torch.float32) + b2 = ldm_patched.modules.model_management.cast_to_device(v[3].flatten(start_dim=1), weight.device, torch.float32) + + weight += ((torch.mm(b2, b1) + torch.mm(torch.mm(weight.flatten(start_dim=1), a2), a1)) * alpha).reshape(weight.shape).type(weight.dtype) + else: + print("patch type not recognized", patch_type, key) + + return weight + + def unpatch_model(self, device_to=None): + keys = list(self.backup.keys()) + + if self.weight_inplace_update: + for k in keys: + ldm_patched.modules.utils.copy_to_param(self.model, k, self.backup[k]) + else: + for k in keys: + ldm_patched.modules.utils.set_attr(self.model, k, self.backup[k]) + + self.backup = {} + + if device_to is not None: + self.model.to(device_to) + self.current_device = device_to + + keys = list(self.object_patches_backup.keys()) + for k in keys: + setattr(self.model, k, self.object_patches_backup[k]) + + self.object_patches_backup = {} diff --git a/ldm_patched/modules/model_sampling.py b/ldm_patched/modules/model_sampling.py new file mode 100644 index 0000000000000000000000000000000000000000..f39e275d3824e87eb1e1e069d16869d0abdad630 --- /dev/null +++ b/ldm_patched/modules/model_sampling.py @@ -0,0 +1,136 @@ +import torch +import numpy as np +from ldm_patched.ldm.modules.diffusionmodules.util import make_beta_schedule +import math + +class EPS: + def calculate_input(self, sigma, noise): + sigma = sigma.view(sigma.shape[:1] + (1,) * (noise.ndim - 1)) + return noise / (sigma ** 2 + self.sigma_data ** 2) ** 0.5 
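+
+    # The division above applies the EDM-style input preconditioning
+    # c_in = 1 / sqrt(sigma^2 + sigma_data^2) (cf. Karras et al. 2022); with the
+    # default sigma_data = 1.0 set by the ModelSampling classes below, a latent at
+    # sigma = 1.0 is scaled by 1/sqrt(2) before it reaches the UNet.
+    # calculate_denoised then recovers x0 = x - sigma * eps for an eps-prediction
+    # model; V_PREDICTION overrides it with the corresponding v-parameterization.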
+ + def calculate_denoised(self, sigma, model_output, model_input): + sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) + return model_input - model_output * sigma + + +class V_PREDICTION(EPS): + def calculate_denoised(self, sigma, model_output, model_input): + sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) + return model_input * self.sigma_data ** 2 / (sigma ** 2 + self.sigma_data ** 2) - model_output * sigma * self.sigma_data / (sigma ** 2 + self.sigma_data ** 2) ** 0.5 + + +class ModelSamplingDiscrete(torch.nn.Module): + def __init__(self, model_config=None): + super().__init__() + + if model_config is not None: + sampling_settings = model_config.sampling_settings + else: + sampling_settings = {} + + beta_schedule = sampling_settings.get("beta_schedule", "linear") + linear_start = sampling_settings.get("linear_start", 0.00085) + linear_end = sampling_settings.get("linear_end", 0.012) + + self._register_schedule(given_betas=None, beta_schedule=beta_schedule, timesteps=1000, linear_start=linear_start, linear_end=linear_end, cosine_s=8e-3) + self.sigma_data = 1.0 + + def _register_schedule(self, given_betas=None, beta_schedule="linear", timesteps=1000, + linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): + if given_betas is not None: + betas = given_betas + else: + betas = make_beta_schedule(beta_schedule, timesteps, linear_start=linear_start, linear_end=linear_end, cosine_s=cosine_s) + alphas = 1. - betas + alphas_cumprod = torch.tensor(np.cumprod(alphas, axis=0), dtype=torch.float32) + # alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1]) + + timesteps, = betas.shape + self.num_timesteps = int(timesteps) + self.linear_start = linear_start + self.linear_end = linear_end + + # self.register_buffer('betas', torch.tensor(betas, dtype=torch.float32)) + # self.register_buffer('alphas_cumprod', torch.tensor(alphas_cumprod, dtype=torch.float32)) + # self.register_buffer('alphas_cumprod_prev', torch.tensor(alphas_cumprod_prev, dtype=torch.float32)) + + sigmas = ((1 - alphas_cumprod) / alphas_cumprod) ** 0.5 + self.set_sigmas(sigmas) + + def set_sigmas(self, sigmas): + self.register_buffer('sigmas', sigmas) + self.register_buffer('log_sigmas', sigmas.log()) + + @property + def sigma_min(self): + return self.sigmas[0] + + @property + def sigma_max(self): + return self.sigmas[-1] + + def timestep(self, sigma): + log_sigma = sigma.log() + dists = log_sigma.to(self.log_sigmas.device) - self.log_sigmas[:, None] + return dists.abs().argmin(dim=0).view(sigma.shape).to(sigma.device) + + def sigma(self, timestep): + t = torch.clamp(timestep.float().to(self.log_sigmas.device), min=0, max=(len(self.sigmas) - 1)) + low_idx = t.floor().long() + high_idx = t.ceil().long() + w = t.frac() + log_sigma = (1 - w) * self.log_sigmas[low_idx] + w * self.log_sigmas[high_idx] + return log_sigma.exp().to(timestep.device) + + def percent_to_sigma(self, percent): + if percent <= 0.0: + return 999999999.9 + if percent >= 1.0: + return 0.0 + percent = 1.0 - percent + return self.sigma(torch.tensor(percent * 999.0)).item() + + +class ModelSamplingContinuousEDM(torch.nn.Module): + def __init__(self, model_config=None): + super().__init__() + self.sigma_data = 1.0 + + if model_config is not None: + sampling_settings = model_config.sampling_settings + else: + sampling_settings = {} + + sigma_min = sampling_settings.get("sigma_min", 0.002) + sigma_max = sampling_settings.get("sigma_max", 120.0) + self.set_sigma_range(sigma_min, sigma_max) + + def set_sigma_range(self, sigma_min, 
sigma_max): + sigmas = torch.linspace(math.log(sigma_min), math.log(sigma_max), 1000).exp() + + self.register_buffer('sigmas', sigmas) #for compatibility with some schedulers + self.register_buffer('log_sigmas', sigmas.log()) + + @property + def sigma_min(self): + return self.sigmas[0] + + @property + def sigma_max(self): + return self.sigmas[-1] + + def timestep(self, sigma): + return 0.25 * sigma.log() + + def sigma(self, timestep): + return (timestep / 0.25).exp() + + def percent_to_sigma(self, percent): + if percent <= 0.0: + return 999999999.9 + if percent >= 1.0: + return 0.0 + percent = 1.0 - percent + + log_sigma_min = math.log(self.sigma_min) + return math.exp((math.log(self.sigma_max) - log_sigma_min) * percent + log_sigma_min) diff --git a/ldm_patched/modules/ops.py b/ldm_patched/modules/ops.py new file mode 100644 index 0000000000000000000000000000000000000000..2d7fa3776bc9a490563bcebac7d7f27c3e90a39e --- /dev/null +++ b/ldm_patched/modules/ops.py @@ -0,0 +1,114 @@ +import torch +import ldm_patched.modules.model_management + +def cast_bias_weight(s, input): + bias = None + non_blocking = ldm_patched.modules.model_management.device_supports_non_blocking(input.device) + if s.bias is not None: + bias = s.bias.to(device=input.device, dtype=input.dtype, non_blocking=non_blocking) + weight = s.weight.to(device=input.device, dtype=input.dtype, non_blocking=non_blocking) + return weight, bias + + +class disable_weight_init: + class Linear(torch.nn.Linear): + ldm_patched_cast_weights = False + def reset_parameters(self): + return None + + def forward_ldm_patched_cast_weights(self, input): + weight, bias = cast_bias_weight(self, input) + return torch.nn.functional.linear(input, weight, bias) + + def forward(self, *args, **kwargs): + if self.ldm_patched_cast_weights: + return self.forward_ldm_patched_cast_weights(*args, **kwargs) + else: + return super().forward(*args, **kwargs) + + class Conv2d(torch.nn.Conv2d): + ldm_patched_cast_weights = False + def reset_parameters(self): + return None + + def forward_ldm_patched_cast_weights(self, input): + weight, bias = cast_bias_weight(self, input) + return self._conv_forward(input, weight, bias) + + def forward(self, *args, **kwargs): + if self.ldm_patched_cast_weights: + return self.forward_ldm_patched_cast_weights(*args, **kwargs) + else: + return super().forward(*args, **kwargs) + + class Conv3d(torch.nn.Conv3d): + ldm_patched_cast_weights = False + def reset_parameters(self): + return None + + def forward_ldm_patched_cast_weights(self, input): + weight, bias = cast_bias_weight(self, input) + return self._conv_forward(input, weight, bias) + + def forward(self, *args, **kwargs): + if self.ldm_patched_cast_weights: + return self.forward_ldm_patched_cast_weights(*args, **kwargs) + else: + return super().forward(*args, **kwargs) + + class GroupNorm(torch.nn.GroupNorm): + ldm_patched_cast_weights = False + def reset_parameters(self): + return None + + def forward_ldm_patched_cast_weights(self, input): + weight, bias = cast_bias_weight(self, input) + return torch.nn.functional.group_norm(input, self.num_groups, weight, bias, self.eps) + + def forward(self, *args, **kwargs): + if self.ldm_patched_cast_weights: + return self.forward_ldm_patched_cast_weights(*args, **kwargs) + else: + return super().forward(*args, **kwargs) + + + class LayerNorm(torch.nn.LayerNorm): + ldm_patched_cast_weights = False + def reset_parameters(self): + return None + + def forward_ldm_patched_cast_weights(self, input): + weight, bias = cast_bias_weight(self, input) + 
return torch.nn.functional.layer_norm(input, self.normalized_shape, weight, bias, self.eps) + + def forward(self, *args, **kwargs): + if self.ldm_patched_cast_weights: + return self.forward_ldm_patched_cast_weights(*args, **kwargs) + else: + return super().forward(*args, **kwargs) + + @classmethod + def conv_nd(s, dims, *args, **kwargs): + if dims == 2: + return s.Conv2d(*args, **kwargs) + elif dims == 3: + return s.Conv3d(*args, **kwargs) + else: + raise ValueError(f"unsupported dimensions: {dims}") + + +class manual_cast(disable_weight_init): + class Linear(disable_weight_init.Linear): + ldm_patched_cast_weights = True + + class Conv2d(disable_weight_init.Conv2d): + ldm_patched_cast_weights = True + + class Conv3d(disable_weight_init.Conv3d): + ldm_patched_cast_weights = True + + class GroupNorm(disable_weight_init.GroupNorm): + ldm_patched_cast_weights = True + + class LayerNorm(disable_weight_init.LayerNorm): + ldm_patched_cast_weights = True diff --git a/ldm_patched/modules/options.py b/ldm_patched/modules/options.py new file mode 100644 index 0000000000000000000000000000000000000000..f7f8af41ebd8b9669ef0ef21827ea6195bcb4752 --- /dev/null +++ b/ldm_patched/modules/options.py @@ -0,0 +1,6 @@ + +args_parsing = False + +def enable_args_parsing(enable=True): + global args_parsing + args_parsing = enable diff --git a/ldm_patched/modules/sample.py b/ldm_patched/modules/sample.py new file mode 100644 index 0000000000000000000000000000000000000000..0f4839503758aa4bbfb76c26ef7a383627eeff4a --- /dev/null +++ b/ldm_patched/modules/sample.py @@ -0,0 +1,118 @@ +import torch +import ldm_patched.modules.model_management +import ldm_patched.modules.samplers +import ldm_patched.modules.conds +import ldm_patched.modules.utils +import math +import numpy as np + +def prepare_noise(latent_image, seed, noise_inds=None): + """ + creates random noise given a latent image and a seed. 
the optional noise_inds argument selects which draws from the seed's deterministic noise sequence to keep; earlier draws are generated and then discarded, so a given index always receives the same noise + """ + generator = torch.manual_seed(seed) + if noise_inds is None: + return torch.randn(latent_image.size(), dtype=latent_image.dtype, layout=latent_image.layout, generator=generator, device="cpu") + + unique_inds, inverse = np.unique(noise_inds, return_inverse=True) + noises = [] + for i in range(unique_inds[-1]+1): + noise = torch.randn([1] + list(latent_image.size())[1:], dtype=latent_image.dtype, layout=latent_image.layout, generator=generator, device="cpu") + if i in unique_inds: + noises.append(noise) + noises = [noises[i] for i in inverse] + noises = torch.cat(noises, axis=0) + return noises + +def prepare_mask(noise_mask, shape, device): + """ensures the noise mask matches the latent in spatial size, channel count and batch size""" + noise_mask = torch.nn.functional.interpolate(noise_mask.reshape((-1, 1, noise_mask.shape[-2], noise_mask.shape[-1])), size=(shape[2], shape[3]), mode="bilinear") + noise_mask = torch.cat([noise_mask] * shape[1], dim=1) + noise_mask = ldm_patched.modules.utils.repeat_to_batch_size(noise_mask, shape[0]) + noise_mask = noise_mask.to(device) + return noise_mask + +def get_models_from_cond(cond, model_type): + models = [] + for c in cond: + if model_type in c: + models += [c[model_type]] + return models + +def convert_cond(cond): + out = [] + for c in cond: + temp = c[1].copy() + model_conds = temp.get("model_conds", {}) + if c[0] is not None: + model_conds["c_crossattn"] = ldm_patched.modules.conds.CONDCrossAttn(c[0]) #TODO: remove + temp["cross_attn"] = c[0] + temp["model_conds"] = model_conds + out.append(temp) + return out + +def get_additional_models(positive, negative, dtype): + """collects the additional models (controlnets, gligen) referenced by the positive and negative conditioning""" + control_nets = set(get_models_from_cond(positive, "control") + get_models_from_cond(negative, "control")) + + inference_memory = 0 + control_models = [] + for m in control_nets: + control_models += m.get_models() + inference_memory += m.inference_memory_requirements(dtype) + + gligen = get_models_from_cond(positive, "gligen") + get_models_from_cond(negative, "gligen") + gligen = [x[1] for x in gligen] + models = control_models + gligen + return models, inference_memory + +def cleanup_additional_models(models): + """clean up additional models that were loaded""" + for m in models: + if hasattr(m, 'cleanup'): + m.cleanup() + +def prepare_sampling(model, noise_shape, positive, negative, noise_mask): + device = model.load_device + positive = convert_cond(positive) + negative = convert_cond(negative) + + if noise_mask is not None: + noise_mask = prepare_mask(noise_mask, noise_shape, device) + + real_model = None + models, inference_memory = get_additional_models(positive, negative, model.model_dtype()) + ldm_patched.modules.model_management.load_models_gpu([model] + models, model.memory_required([noise_shape[0] * 2] + list(noise_shape[1:])) + inference_memory) + real_model = model.model + + return real_model, positive, negative, noise_mask, models + + +def sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=1.0, disable_noise=False, start_step=None, last_step=None, force_full_denoise=False, noise_mask=None, sigmas=None, callback=None, disable_pbar=False, seed=None): + real_model, positive_copy, negative_copy, noise_mask, models = prepare_sampling(model, noise.shape, positive, negative, noise_mask) + + noise = noise.to(model.load_device) + latent_image = latent_image.to(model.load_device) + + sampler = 
ldm_patched.modules.samplers.KSampler(real_model, steps=steps, device=model.load_device, sampler=sampler_name, scheduler=scheduler, denoise=denoise, model_options=model.model_options) + + samples = sampler.sample(noise, positive_copy, negative_copy, cfg=cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, denoise_mask=noise_mask, sigmas=sigmas, callback=callback, disable_pbar=disable_pbar, seed=seed) + samples = samples.to(ldm_patched.modules.model_management.intermediate_device()) + + cleanup_additional_models(models) + cleanup_additional_models(set(get_models_from_cond(positive_copy, "control") + get_models_from_cond(negative_copy, "control"))) + return samples + +def sample_custom(model, noise, cfg, sampler, sigmas, positive, negative, latent_image, noise_mask=None, callback=None, disable_pbar=False, seed=None): + real_model, positive_copy, negative_copy, noise_mask, models = prepare_sampling(model, noise.shape, positive, negative, noise_mask) + noise = noise.to(model.load_device) + latent_image = latent_image.to(model.load_device) + sigmas = sigmas.to(model.load_device) + + samples = ldm_patched.modules.samplers.sample(real_model, noise, positive_copy, negative_copy, cfg, model.load_device, sampler, sigmas, model_options=model.model_options, latent_image=latent_image, denoise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed) + samples = samples.to(ldm_patched.modules.model_management.intermediate_device()) + cleanup_additional_models(models) + cleanup_additional_models(set(get_models_from_cond(positive_copy, "control") + get_models_from_cond(negative_copy, "control"))) + return samples + diff --git a/ldm_patched/modules/samplers.py b/ldm_patched/modules/samplers.py new file mode 100644 index 0000000000000000000000000000000000000000..1f69d2b101403fb2fcaf3f2ab020bc10f4058c45 --- /dev/null +++ b/ldm_patched/modules/samplers.py @@ -0,0 +1,712 @@ +from ldm_patched.k_diffusion import sampling as k_diffusion_sampling +from ldm_patched.unipc import uni_pc +import torch +import collections +from ldm_patched.modules import model_management +import math + +def get_area_and_mult(conds, x_in, timestep_in): + area = (x_in.shape[2], x_in.shape[3], 0, 0) + strength = 1.0 + + if 'timestep_start' in conds: + timestep_start = conds['timestep_start'] + if timestep_in[0] > timestep_start: + return None + if 'timestep_end' in conds: + timestep_end = conds['timestep_end'] + if timestep_in[0] < timestep_end: + return None + if 'area' in conds: + area = conds['area'] + if 'strength' in conds: + strength = conds['strength'] + + input_x = x_in[:,:,area[2]:area[0] + area[2],area[3]:area[1] + area[3]] + if 'mask' in conds: + # Scale the mask to the size of the input + # The mask should have been resized as we began the sampling process + mask_strength = 1.0 + if "mask_strength" in conds: + mask_strength = conds["mask_strength"] + mask = conds['mask'] + assert(mask.shape[1] == x_in.shape[2]) + assert(mask.shape[2] == x_in.shape[3]) + mask = mask[:,area[2]:area[0] + area[2],area[3]:area[1] + area[3]] * mask_strength + mask = mask.unsqueeze(1).repeat(input_x.shape[0] // mask.shape[0], input_x.shape[1], 1, 1) + else: + mask = torch.ones_like(input_x) + mult = mask * strength + + if 'mask' not in conds: + rr = 8 + if area[2] != 0: + for t in range(rr): + mult[:,:,t:1+t,:] *= ((1.0/rr) * (t + 1)) + if (area[0] + area[2]) < x_in.shape[2]: + for t in range(rr): + mult[:,:,area[0] - 1 - t:area[0] - t,:] *= ((1.0/rr) * (t + 1)) + if 
area[3] != 0: + for t in range(rr): + mult[:,:,:,t:1+t] *= ((1.0/rr) * (t + 1)) + if (area[1] + area[3]) < x_in.shape[3]: + for t in range(rr): + mult[:,:,:,area[1] - 1 - t:area[1] - t] *= ((1.0/rr) * (t + 1)) + + conditioning = {} + model_conds = conds["model_conds"] + for c in model_conds: + conditioning[c] = model_conds[c].process_cond(batch_size=x_in.shape[0], device=x_in.device, area=area) + + control = conds.get('control', None) + + patches = None + if 'gligen' in conds: + gligen = conds['gligen'] + patches = {} + gligen_type = gligen[0] + gligen_model = gligen[1] + if gligen_type == "position": + gligen_patch = gligen_model.model.set_position(input_x.shape, gligen[2], input_x.device) + else: + gligen_patch = gligen_model.model.set_empty(input_x.shape, input_x.device) + + patches['middle_patch'] = [gligen_patch] + + cond_obj = collections.namedtuple('cond_obj', ['input_x', 'mult', 'conditioning', 'area', 'control', 'patches']) + return cond_obj(input_x, mult, conditioning, area, control, patches) + +def cond_equal_size(c1, c2): + if c1 is c2: + return True + if c1.keys() != c2.keys(): + return False + for k in c1: + if not c1[k].can_concat(c2[k]): + return False + return True + +def can_concat_cond(c1, c2): + if c1.input_x.shape != c2.input_x.shape: + return False + + def objects_concatable(obj1, obj2): + if (obj1 is None) != (obj2 is None): + return False + if obj1 is not None: + if obj1 is not obj2: + return False + return True + + if not objects_concatable(c1.control, c2.control): + return False + + if not objects_concatable(c1.patches, c2.patches): + return False + + return cond_equal_size(c1.conditioning, c2.conditioning) + +def cond_cat(c_list): + c_crossattn = [] + c_concat = [] + c_adm = [] + crossattn_max_len = 0 + + temp = {} + for x in c_list: + for k in x: + cur = temp.get(k, []) + cur.append(x[k]) + temp[k] = cur + + out = {} + for k in temp: + conds = temp[k] + out[k] = conds[0].concat(conds[1:]) + + return out + +def calc_cond_uncond_batch(model, cond, uncond, x_in, timestep, model_options): + out_cond = torch.zeros_like(x_in) + out_count = torch.ones_like(x_in) * 1e-37 + + out_uncond = torch.zeros_like(x_in) + out_uncond_count = torch.ones_like(x_in) * 1e-37 + + COND = 0 + UNCOND = 1 + + to_run = [] + for x in cond: + p = get_area_and_mult(x, x_in, timestep) + if p is None: + continue + + to_run += [(p, COND)] + if uncond is not None: + for x in uncond: + p = get_area_and_mult(x, x_in, timestep) + if p is None: + continue + + to_run += [(p, UNCOND)] + + while len(to_run) > 0: + first = to_run[0] + first_shape = first[0][0].shape + to_batch_temp = [] + for x in range(len(to_run)): + if can_concat_cond(to_run[x][0], first[0]): + to_batch_temp += [x] + + to_batch_temp.reverse() + to_batch = to_batch_temp[:1] + + free_memory = model_management.get_free_memory(x_in.device) + for i in range(1, len(to_batch_temp) + 1): + batch_amount = to_batch_temp[:len(to_batch_temp)//i] + input_shape = [len(batch_amount) * first_shape[0]] + list(first_shape)[1:] + if model.memory_required(input_shape) < free_memory: + to_batch = batch_amount + break + + input_x = [] + mult = [] + c = [] + cond_or_uncond = [] + area = [] + control = None + patches = None + for x in to_batch: + o = to_run.pop(x) + p = o[0] + input_x.append(p.input_x) + mult.append(p.mult) + c.append(p.conditioning) + area.append(p.area) + cond_or_uncond.append(o[1]) + control = p.control + patches = p.patches + + batch_chunks = len(cond_or_uncond) + input_x = torch.cat(input_x) + c = cond_cat(c) + timestep_ = 
torch.cat([timestep] * batch_chunks) + + if control is not None: + c['control'] = control.get_control(input_x, timestep_, c, len(cond_or_uncond)) + + transformer_options = {} + if 'transformer_options' in model_options: + transformer_options = model_options['transformer_options'].copy() + + if patches is not None: + if "patches" in transformer_options: + cur_patches = transformer_options["patches"].copy() + for p in patches: + if p in cur_patches: + cur_patches[p] = cur_patches[p] + patches[p] + else: + cur_patches[p] = patches[p] + else: + transformer_options["patches"] = patches + + transformer_options["cond_or_uncond"] = cond_or_uncond[:] + transformer_options["sigmas"] = timestep + + c['transformer_options'] = transformer_options + + if 'model_function_wrapper' in model_options: + output = model_options['model_function_wrapper'](model.apply_model, {"input": input_x, "timestep": timestep_, "c": c, "cond_or_uncond": cond_or_uncond}).chunk(batch_chunks) + else: + output = model.apply_model(input_x, timestep_, **c).chunk(batch_chunks) + del input_x + + for o in range(batch_chunks): + if cond_or_uncond[o] == COND: + out_cond[:,:,area[o][2]:area[o][0] + area[o][2],area[o][3]:area[o][1] + area[o][3]] += output[o] * mult[o] + out_count[:,:,area[o][2]:area[o][0] + area[o][2],area[o][3]:area[o][1] + area[o][3]] += mult[o] + else: + out_uncond[:,:,area[o][2]:area[o][0] + area[o][2],area[o][3]:area[o][1] + area[o][3]] += output[o] * mult[o] + out_uncond_count[:,:,area[o][2]:area[o][0] + area[o][2],area[o][3]:area[o][1] + area[o][3]] += mult[o] + del mult + + out_cond /= out_count + del out_count + out_uncond /= out_uncond_count + del out_uncond_count + return out_cond, out_uncond + +#The main sampling function shared by all the samplers +#Returns denoised +def sampling_function(model, x, timestep, uncond, cond, cond_scale, model_options={}, seed=None): + if math.isclose(cond_scale, 1.0) and model_options.get("disable_cfg1_optimization", False) == False: + uncond_ = None + else: + uncond_ = uncond + + cond_pred, uncond_pred = calc_cond_uncond_batch(model, cond, uncond_, x, timestep, model_options) + if "sampler_cfg_function" in model_options: + args = {"cond": x - cond_pred, "uncond": x - uncond_pred, "cond_scale": cond_scale, "timestep": timestep, "input": x, "sigma": timestep, + "cond_denoised": cond_pred, "uncond_denoised": uncond_pred, "model": model, "model_options": model_options} + cfg_result = x - model_options["sampler_cfg_function"](args) + else: + cfg_result = uncond_pred + (cond_pred - uncond_pred) * cond_scale + + for fn in model_options.get("sampler_post_cfg_function", []): + args = {"denoised": cfg_result, "cond": cond, "uncond": uncond, "model": model, "uncond_denoised": uncond_pred, "cond_denoised": cond_pred, + "sigma": timestep, "model_options": model_options, "input": x} + cfg_result = fn(args) + + return cfg_result + +class CFGNoisePredictor(torch.nn.Module): + def __init__(self, model): + super().__init__() + self.inner_model = model + def apply_model(self, x, timestep, cond, uncond, cond_scale, model_options={}, seed=None): + out = sampling_function(self.inner_model, x, timestep, uncond, cond, cond_scale, model_options=model_options, seed=seed) + return out + def forward(self, *args, **kwargs): + return self.apply_model(*args, **kwargs) + +class KSamplerX0Inpaint(torch.nn.Module): + def __init__(self, model): + super().__init__() + self.inner_model = model + def forward(self, x, sigma, uncond, cond, cond_scale, denoise_mask, model_options={}, seed=None): + if denoise_mask is 
not None: + latent_mask = 1. - denoise_mask + x = x * denoise_mask + (self.latent_image + self.noise * sigma.reshape([sigma.shape[0]] + [1] * (len(self.noise.shape) - 1))) * latent_mask + out = self.inner_model(x, sigma, cond=cond, uncond=uncond, cond_scale=cond_scale, model_options=model_options, seed=seed) + if denoise_mask is not None: + out = out * denoise_mask + self.latent_image * latent_mask + return out + +def simple_scheduler(model, steps): + s = model.model_sampling + sigs = [] + ss = len(s.sigmas) / steps + for x in range(steps): + sigs += [float(s.sigmas[-(1 + int(x * ss))])] + sigs += [0.0] + return torch.FloatTensor(sigs) + +def ddim_scheduler(model, steps): + s = model.model_sampling + sigs = [] + ss = len(s.sigmas) // steps + x = 1 + while x < len(s.sigmas): + sigs += [float(s.sigmas[x])] + x += ss + sigs = sigs[::-1] + sigs += [0.0] + return torch.FloatTensor(sigs) + +def normal_scheduler(model, steps, sgm=False, floor=False): + s = model.model_sampling + start = s.timestep(s.sigma_max) + end = s.timestep(s.sigma_min) + + if sgm: + timesteps = torch.linspace(start, end, steps + 1)[:-1] + else: + timesteps = torch.linspace(start, end, steps) + + sigs = [] + for x in range(len(timesteps)): + ts = timesteps[x] + sigs.append(s.sigma(ts)) + sigs += [0.0] + return torch.FloatTensor(sigs) + +def get_mask_aabb(masks): + if masks.numel() == 0: + return torch.zeros((0, 4), device=masks.device, dtype=torch.int) + + b = masks.shape[0] + + bounding_boxes = torch.zeros((b, 4), device=masks.device, dtype=torch.int) + is_empty = torch.zeros((b), device=masks.device, dtype=torch.bool) + for i in range(b): + mask = masks[i] + if mask.numel() == 0: + continue + if torch.max(mask != 0) == False: + is_empty[i] = True + continue + y, x = torch.where(mask) + bounding_boxes[i, 0] = torch.min(x) + bounding_boxes[i, 1] = torch.min(y) + bounding_boxes[i, 2] = torch.max(x) + bounding_boxes[i, 3] = torch.max(y) + + return bounding_boxes, is_empty + +def resolve_areas_and_cond_masks(conditions, h, w, device): + # We need to decide on an area outside the sampling loop in order to properly generate opposite areas of equal sizes. + # While we're doing this, we can also resolve the mask device and scaling for performance reasons + for i in range(len(conditions)): + c = conditions[i] + if 'area' in c: + area = c['area'] + if area[0] == "percentage": + modified = c.copy() + area = (max(1, round(area[1] * h)), max(1, round(area[2] * w)), round(area[3] * h), round(area[4] * w)) + modified['area'] = area + c = modified + conditions[i] = c + + if 'mask' in c: + mask = c['mask'] + mask = mask.to(device=device) + modified = c.copy() + if len(mask.shape) == 2: + mask = mask.unsqueeze(0) + if mask.shape[1] != h or mask.shape[2] != w: + mask = torch.nn.functional.interpolate(mask.unsqueeze(1), size=(h, w), mode='bilinear', align_corners=False).squeeze(1) + + if modified.get("set_area_to_bounds", False): + bounds = torch.max(torch.abs(mask),dim=0).values.unsqueeze(0) + boxes, is_empty = get_mask_aabb(bounds) + if is_empty[0]: + # Use the minimum possible size for efficiency reasons. 
(Since the mask is all-0, this becomes a noop anyway) + modified['area'] = (8, 8, 0, 0) + else: + box = boxes[0] + H, W, Y, X = (box[3] - box[1] + 1, box[2] - box[0] + 1, box[1], box[0]) + H = max(8, H) + W = max(8, W) + area = (int(H), int(W), int(Y), int(X)) + modified['area'] = area + + modified['mask'] = mask + conditions[i] = modified + +def create_cond_with_same_area_if_none(conds, c): + if 'area' not in c: + return + + c_area = c['area'] + smallest = None + for x in conds: + if 'area' in x: + a = x['area'] + if c_area[2] >= a[2] and c_area[3] >= a[3]: + if a[0] + a[2] >= c_area[0] + c_area[2]: + if a[1] + a[3] >= c_area[1] + c_area[3]: + if smallest is None: + smallest = x + elif 'area' not in smallest: + smallest = x + else: + if smallest['area'][0] * smallest['area'][1] > a[0] * a[1]: + smallest = x + else: + if smallest is None: + smallest = x + if smallest is None: + return + if 'area' in smallest: + if smallest['area'] == c_area: + return + + out = c.copy() + out['model_conds'] = smallest['model_conds'].copy() #TODO: which fields should be copied? + conds += [out] + +def calculate_start_end_timesteps(model, conds): + s = model.model_sampling + for t in range(len(conds)): + x = conds[t] + + timestep_start = None + timestep_end = None + if 'start_percent' in x: + timestep_start = s.percent_to_sigma(x['start_percent']) + if 'end_percent' in x: + timestep_end = s.percent_to_sigma(x['end_percent']) + + if (timestep_start is not None) or (timestep_end is not None): + n = x.copy() + if (timestep_start is not None): + n['timestep_start'] = timestep_start + if (timestep_end is not None): + n['timestep_end'] = timestep_end + conds[t] = n + +def pre_run_control(model, conds): + s = model.model_sampling + for t in range(len(conds)): + x = conds[t] + + timestep_start = None + timestep_end = None + percent_to_timestep_function = lambda a: s.percent_to_sigma(a) + if 'control' in x: + x['control'].pre_run(model, percent_to_timestep_function) + +def apply_empty_x_to_equal_area(conds, uncond, name, uncond_fill_func): + cond_cnets = [] + cond_other = [] + uncond_cnets = [] + uncond_other = [] + for t in range(len(conds)): + x = conds[t] + if 'area' not in x: + if name in x and x[name] is not None: + cond_cnets.append(x[name]) + else: + cond_other.append((x, t)) + for t in range(len(uncond)): + x = uncond[t] + if 'area' not in x: + if name in x and x[name] is not None: + uncond_cnets.append(x[name]) + else: + uncond_other.append((x, t)) + + if len(uncond_cnets) > 0: + return + + for x in range(len(cond_cnets)): + temp = uncond_other[x % len(uncond_other)] + o = temp[0] + if name in o and o[name] is not None: + n = o.copy() + n[name] = uncond_fill_func(cond_cnets, x) + uncond += [n] + else: + n = o.copy() + n[name] = uncond_fill_func(cond_cnets, x) + uncond[temp[1]] = n + +def encode_model_conds(model_function, conds, noise, device, prompt_type, **kwargs): + for t in range(len(conds)): + x = conds[t] + params = x.copy() + params["device"] = device + params["noise"] = noise + params["width"] = params.get("width", noise.shape[3] * 8) + params["height"] = params.get("height", noise.shape[2] * 8) + params["prompt_type"] = params.get("prompt_type", prompt_type) + for k in kwargs: + if k not in params: + params[k] = kwargs[k] + + out = model_function(**params) + x = x.copy() + model_conds = x['model_conds'].copy() + for k in out: + model_conds[k] = out[k] + x['model_conds'] = model_conds + conds[t] = x + return conds + +class Sampler: + def sample(self): + pass + + def max_denoise(self, model_wrap, 
sigmas): + max_sigma = float(model_wrap.inner_model.model_sampling.sigma_max) + sigma = float(sigmas[0]) + return math.isclose(max_sigma, sigma, rel_tol=1e-05) or sigma > max_sigma + +class UNIPC(Sampler): + def sample(self, model_wrap, sigmas, extra_args, callback, noise, latent_image=None, denoise_mask=None, disable_pbar=False): + return uni_pc.sample_unipc(model_wrap, noise, latent_image, sigmas, max_denoise=self.max_denoise(model_wrap, sigmas), extra_args=extra_args, noise_mask=denoise_mask, callback=callback, disable=disable_pbar) + +class UNIPCBH2(Sampler): + def sample(self, model_wrap, sigmas, extra_args, callback, noise, latent_image=None, denoise_mask=None, disable_pbar=False): + return uni_pc.sample_unipc(model_wrap, noise, latent_image, sigmas, max_denoise=self.max_denoise(model_wrap, sigmas), extra_args=extra_args, noise_mask=denoise_mask, callback=callback, variant='bh2', disable=disable_pbar) + +KSAMPLER_NAMES = ["euler", "euler_ancestral", "heun", "heunpp2","dpm_2", "dpm_2_ancestral", + "lms", "dpm_fast", "dpm_adaptive", "dpmpp_2s_ancestral", "dpmpp_sde", "dpmpp_sde_gpu", + "dpmpp_2m", "dpmpp_2m_sde", "dpmpp_2m_sde_gpu", "dpmpp_3m_sde", "dpmpp_3m_sde_gpu", "ddpm", "lcm"] + +class KSAMPLER(Sampler): + def __init__(self, sampler_function, extra_options={}, inpaint_options={}): + self.sampler_function = sampler_function + self.extra_options = extra_options + self.inpaint_options = inpaint_options + + def sample(self, model_wrap, sigmas, extra_args, callback, noise, latent_image=None, denoise_mask=None, disable_pbar=False): + extra_args["denoise_mask"] = denoise_mask + model_k = KSamplerX0Inpaint(model_wrap) + model_k.latent_image = latent_image + if self.inpaint_options.get("random", False): #TODO: Should this be the default? + generator = torch.manual_seed(extra_args.get("seed", 41) + 1) + model_k.noise = torch.randn(noise.shape, generator=generator, device="cpu").to(noise.dtype).to(noise.device) + else: + model_k.noise = noise + + if self.max_denoise(model_wrap, sigmas): + noise = noise * torch.sqrt(1.0 + sigmas[0] ** 2.0) + else: + noise = noise * sigmas[0] + + k_callback = None + total_steps = len(sigmas) - 1 + if callback is not None: + k_callback = lambda x: callback(x["i"], x["denoised"], x["x"], total_steps) + + if latent_image is not None: + noise += latent_image + + samples = self.sampler_function(model_k, noise, sigmas, extra_args=extra_args, callback=k_callback, disable=disable_pbar, **self.extra_options) + return samples + + +def ksampler(sampler_name, extra_options={}, inpaint_options={}): + if sampler_name == "dpm_fast": + def dpm_fast_function(model, noise, sigmas, extra_args, callback, disable): + sigma_min = sigmas[-1] + if sigma_min == 0: + sigma_min = sigmas[-2] + total_steps = len(sigmas) - 1 + return k_diffusion_sampling.sample_dpm_fast(model, noise, sigma_min, sigmas[0], total_steps, extra_args=extra_args, callback=callback, disable=disable) + sampler_function = dpm_fast_function + elif sampler_name == "dpm_adaptive": + def dpm_adaptive_function(model, noise, sigmas, extra_args, callback, disable): + sigma_min = sigmas[-1] + if sigma_min == 0: + sigma_min = sigmas[-2] + return k_diffusion_sampling.sample_dpm_adaptive(model, noise, sigma_min, sigmas[0], extra_args=extra_args, callback=callback, disable=disable) + sampler_function = dpm_adaptive_function + else: + sampler_function = getattr(k_diffusion_sampling, "sample_{}".format(sampler_name)) + + return KSAMPLER(sampler_function, extra_options, inpaint_options) + +def wrap_model(model): + model_denoise 
= CFGNoisePredictor(model) + return model_denoise + +def sample(model, noise, positive, negative, cfg, device, sampler, sigmas, model_options={}, latent_image=None, denoise_mask=None, callback=None, disable_pbar=False, seed=None): + positive = positive[:] + negative = negative[:] + + resolve_areas_and_cond_masks(positive, noise.shape[2], noise.shape[3], device) + resolve_areas_and_cond_masks(negative, noise.shape[2], noise.shape[3], device) + + model_wrap = wrap_model(model) + + calculate_start_end_timesteps(model, negative) + calculate_start_end_timesteps(model, positive) + + if latent_image is not None: + latent_image = model.process_latent_in(latent_image) + + if hasattr(model, 'extra_conds'): + positive = encode_model_conds(model.extra_conds, positive, noise, device, "positive", latent_image=latent_image, denoise_mask=denoise_mask, seed=seed) + negative = encode_model_conds(model.extra_conds, negative, noise, device, "negative", latent_image=latent_image, denoise_mask=denoise_mask, seed=seed) + + #make sure each cond area has an opposite one with the same area + for c in positive: + create_cond_with_same_area_if_none(negative, c) + for c in negative: + create_cond_with_same_area_if_none(positive, c) + + pre_run_control(model, negative + positive) + + apply_empty_x_to_equal_area(list(filter(lambda c: c.get('control_apply_to_uncond', False) == True, positive)), negative, 'control', lambda cond_cnets, x: cond_cnets[x]) + apply_empty_x_to_equal_area(positive, negative, 'gligen', lambda cond_cnets, x: cond_cnets[x]) + + extra_args = {"cond":positive, "uncond":negative, "cond_scale": cfg, "model_options": model_options, "seed":seed} + + samples = sampler.sample(model_wrap, sigmas, extra_args, callback, noise, latent_image, denoise_mask, disable_pbar) + return model.process_latent_out(samples.to(torch.float32)) + +SCHEDULER_NAMES = ["normal", "karras", "exponential", "sgm_uniform", "simple", "ddim_uniform"] +SAMPLER_NAMES = KSAMPLER_NAMES + ["ddim", "uni_pc", "uni_pc_bh2"] + +def calculate_sigmas_scheduler(model, scheduler_name, steps): + if scheduler_name == "karras": + sigmas = k_diffusion_sampling.get_sigmas_karras(n=steps, sigma_min=float(model.model_sampling.sigma_min), sigma_max=float(model.model_sampling.sigma_max)) + elif scheduler_name == "exponential": + sigmas = k_diffusion_sampling.get_sigmas_exponential(n=steps, sigma_min=float(model.model_sampling.sigma_min), sigma_max=float(model.model_sampling.sigma_max)) + elif scheduler_name == "normal": + sigmas = normal_scheduler(model, steps) + elif scheduler_name == "simple": + sigmas = simple_scheduler(model, steps) + elif scheduler_name == "ddim_uniform": + sigmas = ddim_scheduler(model, steps) + elif scheduler_name == "sgm_uniform": + sigmas = normal_scheduler(model, steps, sgm=True) + else: + print("error invalid scheduler", scheduler_name) + return sigmas + +def sampler_object(name): + if name == "uni_pc": + sampler = UNIPC() + elif name == "uni_pc_bh2": + sampler = UNIPCBH2() + elif name == "ddim": + sampler = ksampler("euler", inpaint_options={"random": True}) + else: + sampler = ksampler(name) + return sampler + +class KSampler: + SCHEDULERS = SCHEDULER_NAMES + SAMPLERS = SAMPLER_NAMES + + def __init__(self, model, steps, device, sampler=None, scheduler=None, denoise=None, model_options={}): + self.model = model + self.device = device + if scheduler not in self.SCHEDULERS: + scheduler = self.SCHEDULERS[0] + if sampler not in self.SAMPLERS: + sampler = self.SAMPLERS[0] + self.scheduler = scheduler + self.sampler = sampler + 
self.set_steps(steps, denoise) + self.denoise = denoise + self.model_options = model_options + + def calculate_sigmas(self, steps): + sigmas = None + + discard_penultimate_sigma = False + if self.sampler in ['dpm_2', 'dpm_2_ancestral', 'uni_pc', 'uni_pc_bh2']: + steps += 1 + discard_penultimate_sigma = True + + sigmas = calculate_sigmas_scheduler(self.model, self.scheduler, steps) + + if discard_penultimate_sigma: + sigmas = torch.cat([sigmas[:-2], sigmas[-1:]]) + return sigmas + + def set_steps(self, steps, denoise=None): + self.steps = steps + if denoise is None or denoise > 0.9999: + self.sigmas = self.calculate_sigmas(steps).to(self.device) + else: + new_steps = int(steps/denoise) + sigmas = self.calculate_sigmas(new_steps).to(self.device) + self.sigmas = sigmas[-(steps + 1):] + + def sample(self, noise, positive, negative, cfg, latent_image=None, start_step=None, last_step=None, force_full_denoise=False, denoise_mask=None, sigmas=None, callback=None, disable_pbar=False, seed=None): + if sigmas is None: + sigmas = self.sigmas + + if last_step is not None and last_step < (len(sigmas) - 1): + sigmas = sigmas[:last_step + 1] + if force_full_denoise: + sigmas[-1] = 0 + + if start_step is not None: + if start_step < (len(sigmas) - 1): + sigmas = sigmas[start_step:] + else: + if latent_image is not None: + return latent_image + else: + return torch.zeros_like(noise) + + sampler = sampler_object(self.sampler) + + return sample(self.model, noise, positive, negative, cfg, self.device, sampler, sigmas, self.model_options, latent_image=latent_image, denoise_mask=denoise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed) diff --git a/ldm_patched/modules/sd.py b/ldm_patched/modules/sd.py new file mode 100644 index 0000000000000000000000000000000000000000..e197c39caf79bc03e136101feb46f4ad06756d3a --- /dev/null +++ b/ldm_patched/modules/sd.py @@ -0,0 +1,544 @@ +import torch + +from ldm_patched.modules import model_management +from ldm_patched.ldm.models.autoencoder import AutoencoderKL, AutoencodingEngine +import yaml + +import ldm_patched.modules.utils + +from . import clip_vision +from . import gligen +from . import diffusers_convert +from . import model_base +from . import model_detection + +from . import sd1_clip +from . import sd2_clip +from . 
import sdxl_clip + +import ldm_patched.modules.model_patcher +import ldm_patched.modules.lora +import ldm_patched.t2ia.adapter +import ldm_patched.modules.supported_models_base +import ldm_patched.taesd.taesd + +def load_model_weights(model, sd): + m, u = model.load_state_dict(sd, strict=False) + m = set(m) + unexpected_keys = set(u) + + k = list(sd.keys()) + for x in k: + if x not in unexpected_keys: + w = sd.pop(x) + del w + if len(m) > 0: + print("extra", m) + return model + +def load_clip_weights(model, sd): + k = list(sd.keys()) + for x in k: + if x.startswith("cond_stage_model.transformer.") and not x.startswith("cond_stage_model.transformer.text_model."): + y = x.replace("cond_stage_model.transformer.", "cond_stage_model.transformer.text_model.") + sd[y] = sd.pop(x) + + if 'cond_stage_model.transformer.text_model.embeddings.position_ids' in sd: + ids = sd['cond_stage_model.transformer.text_model.embeddings.position_ids'] + if ids.dtype == torch.float32: + sd['cond_stage_model.transformer.text_model.embeddings.position_ids'] = ids.round() + + sd = ldm_patched.modules.utils.transformers_convert(sd, "cond_stage_model.model.", "cond_stage_model.transformer.text_model.", 24) + return load_model_weights(model, sd) + + +def load_lora_for_models(model, clip, lora, strength_model, strength_clip): + key_map = {} + if model is not None: + key_map = ldm_patched.modules.lora.model_lora_keys_unet(model.model, key_map) + if clip is not None: + key_map = ldm_patched.modules.lora.model_lora_keys_clip(clip.cond_stage_model, key_map) + + loaded = ldm_patched.modules.lora.load_lora(lora, key_map) + if model is not None: + new_modelpatcher = model.clone() + k = new_modelpatcher.add_patches(loaded, strength_model) + else: + k = () + new_modelpatcher = None + + if clip is not None: + new_clip = clip.clone() + k1 = new_clip.add_patches(loaded, strength_clip) + else: + k1 = () + new_clip = None + k = set(k) + k1 = set(k1) + for x in loaded: + if (x not in k) and (x not in k1): + print("NOT LOADED", x) + + return (new_modelpatcher, new_clip) + + +class CLIP: + def __init__(self, target=None, embedding_directory=None, no_init=False): + if no_init: + return + params = target.params.copy() + clip = target.clip + tokenizer = target.tokenizer + + load_device = model_management.text_encoder_device() + offload_device = model_management.text_encoder_offload_device() + params['device'] = offload_device + params['dtype'] = model_management.text_encoder_dtype(load_device) + + self.cond_stage_model = clip(**(params)) + + self.tokenizer = tokenizer(embedding_directory=embedding_directory) + self.patcher = ldm_patched.modules.model_patcher.ModelPatcher(self.cond_stage_model, load_device=load_device, offload_device=offload_device) + self.layer_idx = None + + def clone(self): + n = CLIP(no_init=True) + n.patcher = self.patcher.clone() + n.cond_stage_model = self.cond_stage_model + n.tokenizer = self.tokenizer + n.layer_idx = self.layer_idx + return n + + def add_patches(self, patches, strength_patch=1.0, strength_model=1.0): + return self.patcher.add_patches(patches, strength_patch, strength_model) + + def clip_layer(self, layer_idx): + self.layer_idx = layer_idx + + def tokenize(self, text, return_word_ids=False): + return self.tokenizer.tokenize_with_weights(text, return_word_ids) + + def encode_from_tokens(self, tokens, return_pooled=False): + if self.layer_idx is not None: + self.cond_stage_model.clip_layer(self.layer_idx) + else: + self.cond_stage_model.reset_clip_layer() + + self.load_model() + cond, pooled = 
self.cond_stage_model.encode_token_weights(tokens) + if return_pooled: + return cond, pooled + return cond + + def encode(self, text): + tokens = self.tokenize(text) + return self.encode_from_tokens(tokens) + + def load_sd(self, sd): + return self.cond_stage_model.load_sd(sd) + + def get_sd(self): + return self.cond_stage_model.state_dict() + + def load_model(self): + model_management.load_model_gpu(self.patcher) + return self.patcher + + def get_key_patches(self): + return self.patcher.get_key_patches() + +class VAE: + def __init__(self, sd=None, device=None, config=None, dtype=None): + if 'decoder.up_blocks.0.resnets.0.norm1.weight' in sd.keys(): #diffusers format + sd = diffusers_convert.convert_vae_state_dict(sd) + + self.memory_used_encode = lambda shape, dtype: (1767 * shape[2] * shape[3]) * model_management.dtype_size(dtype) #These are for AutoencoderKL and need tweaking (should be lower) + self.memory_used_decode = lambda shape, dtype: (2178 * shape[2] * shape[3] * 64) * model_management.dtype_size(dtype) + self.downscale_ratio = 8 + self.latent_channels = 4 + + if config is None: + if "decoder.mid.block_1.mix_factor" in sd: + encoder_config = {'double_z': True, 'z_channels': 4, 'resolution': 256, 'in_channels': 3, 'out_ch': 3, 'ch': 128, 'ch_mult': [1, 2, 4, 4], 'num_res_blocks': 2, 'attn_resolutions': [], 'dropout': 0.0} + decoder_config = encoder_config.copy() + decoder_config["video_kernel_size"] = [3, 1, 1] + decoder_config["alpha"] = 0.0 + self.first_stage_model = AutoencodingEngine(regularizer_config={'target': "ldm_patched.ldm.models.autoencoder.DiagonalGaussianRegularizer"}, + encoder_config={'target': "ldm_patched.ldm.modules.diffusionmodules.model.Encoder", 'params': encoder_config}, + decoder_config={'target': "ldm_patched.ldm.modules.temporal_ae.VideoDecoder", 'params': decoder_config}) + elif "taesd_decoder.1.weight" in sd: + self.first_stage_model = ldm_patched.taesd.taesd.TAESD() + else: + #default SD1.x/SD2.x VAE parameters + ddconfig = {'double_z': True, 'z_channels': 4, 'resolution': 256, 'in_channels': 3, 'out_ch': 3, 'ch': 128, 'ch_mult': [1, 2, 4, 4], 'num_res_blocks': 2, 'attn_resolutions': [], 'dropout': 0.0} + + if 'encoder.down.2.downsample.conv.weight' not in sd: #Stable diffusion x4 upscaler VAE + ddconfig['ch_mult'] = [1, 2, 4] + self.downscale_ratio = 4 + + self.first_stage_model = AutoencoderKL(ddconfig=ddconfig, embed_dim=4) + else: + self.first_stage_model = AutoencoderKL(**(config['params'])) + self.first_stage_model = self.first_stage_model.eval() + + m, u = self.first_stage_model.load_state_dict(sd, strict=False) + if len(m) > 0: + print("Missing VAE keys", m) + + if len(u) > 0: + print("Leftover VAE keys", u) + + if device is None: + device = model_management.vae_device() + self.device = device + offload_device = model_management.vae_offload_device() + if dtype is None: + dtype = model_management.vae_dtype() + self.vae_dtype = dtype + self.first_stage_model.to(self.vae_dtype) + self.output_device = model_management.intermediate_device() + + self.patcher = ldm_patched.modules.model_patcher.ModelPatcher(self.first_stage_model, load_device=self.device, offload_device=offload_device) + + def decode_tiled_(self, samples, tile_x=64, tile_y=64, overlap = 16): + steps = samples.shape[0] * ldm_patched.modules.utils.get_tiled_scale_steps(samples.shape[3], samples.shape[2], tile_x, tile_y, overlap) + steps += samples.shape[0] * ldm_patched.modules.utils.get_tiled_scale_steps(samples.shape[3], samples.shape[2], tile_x // 2, tile_y * 2, overlap) + steps += 
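The `VAE` wrapper above hard-codes `downscale_ratio = 8` and `latent_channels = 4` for the standard SD autoencoder (the x4-upscaler branch switches the ratio to 4). A small sanity-check sketch of the implied latent-to-pixel geometry:

def latent_to_pixel_shape(latent_shape, downscale_ratio=8):
    # NCHW latent -> NCHW RGB image, as VAE.decode allocates it
    n, c, h, w = latent_shape
    return (n, 3, h * downscale_ratio, w * downscale_ratio)

# A 4x64x64 latent decodes to a 512x512 RGB image under the default ratio.
assert latent_to_pixel_shape((1, 4, 64, 64)) == (1, 3, 512, 512)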
samples.shape[0] * ldm_patched.modules.utils.get_tiled_scale_steps(samples.shape[3], samples.shape[2], tile_x * 2, tile_y // 2, overlap) + pbar = ldm_patched.modules.utils.ProgressBar(steps) + + decode_fn = lambda a: (self.first_stage_model.decode(a.to(self.vae_dtype).to(self.device)) + 1.0).float() + output = torch.clamp(( + (ldm_patched.modules.utils.tiled_scale(samples, decode_fn, tile_x // 2, tile_y * 2, overlap, upscale_amount = self.downscale_ratio, output_device=self.output_device, pbar = pbar) + + ldm_patched.modules.utils.tiled_scale(samples, decode_fn, tile_x * 2, tile_y // 2, overlap, upscale_amount = self.downscale_ratio, output_device=self.output_device, pbar = pbar) + + ldm_patched.modules.utils.tiled_scale(samples, decode_fn, tile_x, tile_y, overlap, upscale_amount = self.downscale_ratio, output_device=self.output_device, pbar = pbar)) + / 3.0) / 2.0, min=0.0, max=1.0) + return output + + def encode_tiled_(self, pixel_samples, tile_x=512, tile_y=512, overlap = 64): + steps = pixel_samples.shape[0] * ldm_patched.modules.utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x, tile_y, overlap) + steps += pixel_samples.shape[0] * ldm_patched.modules.utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x // 2, tile_y * 2, overlap) + steps += pixel_samples.shape[0] * ldm_patched.modules.utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x * 2, tile_y // 2, overlap) + pbar = ldm_patched.modules.utils.ProgressBar(steps) + + encode_fn = lambda a: self.first_stage_model.encode((2. * a - 1.).to(self.vae_dtype).to(self.device)).float() + samples = ldm_patched.modules.utils.tiled_scale(pixel_samples, encode_fn, tile_x, tile_y, overlap, upscale_amount = (1/self.downscale_ratio), out_channels=self.latent_channels, output_device=self.output_device, pbar=pbar) + samples += ldm_patched.modules.utils.tiled_scale(pixel_samples, encode_fn, tile_x * 2, tile_y // 2, overlap, upscale_amount = (1/self.downscale_ratio), out_channels=self.latent_channels, output_device=self.output_device, pbar=pbar) + samples += ldm_patched.modules.utils.tiled_scale(pixel_samples, encode_fn, tile_x // 2, tile_y * 2, overlap, upscale_amount = (1/self.downscale_ratio), out_channels=self.latent_channels, output_device=self.output_device, pbar=pbar) + samples /= 3.0 + return samples + + def decode(self, samples_in): + try: + memory_used = self.memory_used_decode(samples_in.shape, self.vae_dtype) + model_management.load_models_gpu([self.patcher], memory_required=memory_used) + free_memory = model_management.get_free_memory(self.device) + batch_number = int(free_memory / memory_used) + batch_number = max(1, batch_number) + + pixel_samples = torch.empty((samples_in.shape[0], 3, round(samples_in.shape[2] * self.downscale_ratio), round(samples_in.shape[3] * self.downscale_ratio)), device=self.output_device) + for x in range(0, samples_in.shape[0], batch_number): + samples = samples_in[x:x+batch_number].to(self.vae_dtype).to(self.device) + pixel_samples[x:x+batch_number] = torch.clamp((self.first_stage_model.decode(samples).to(self.output_device).float() + 1.0) / 2.0, min=0.0, max=1.0) + except model_management.OOM_EXCEPTION as e: + print("Warning: Ran out of memory when regular VAE decoding, retrying with tiled VAE decoding.") + pixel_samples = self.decode_tiled_(samples_in) + + pixel_samples = pixel_samples.to(self.output_device).movedim(1,-1) + return pixel_samples + + def decode_tiled(self, samples, tile_x=64, tile_y=64, overlap = 16): + 
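`VAE.decode` above sizes its batches from the estimated per-item memory cost against currently free VRAM, and only falls back to `decode_tiled_` on an actual OOM. The batching arithmetic in isolation, with illustrative byte counts rather than measured ones:

def decode_batch_size(free_memory, memory_used_per_item):
    # Mirrors the clamp in VAE.decode: never fewer than one item per batch.
    return max(1, int(free_memory / memory_used_per_item))

assert decode_batch_size(8e9, 3e9) == 2   # two latents fit at a time
assert decode_batch_size(1e9, 3e9) == 1   # tight memory still makes progress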
model_management.load_model_gpu(self.patcher) + output = self.decode_tiled_(samples, tile_x, tile_y, overlap) + return output.movedim(1,-1) + + def encode(self, pixel_samples): + pixel_samples = pixel_samples.movedim(-1,1) + try: + memory_used = self.memory_used_encode(pixel_samples.shape, self.vae_dtype) + model_management.load_models_gpu([self.patcher], memory_required=memory_used) + free_memory = model_management.get_free_memory(self.device) + batch_number = int(free_memory / memory_used) + batch_number = max(1, batch_number) + samples = torch.empty((pixel_samples.shape[0], self.latent_channels, round(pixel_samples.shape[2] // self.downscale_ratio), round(pixel_samples.shape[3] // self.downscale_ratio)), device=self.output_device) + for x in range(0, pixel_samples.shape[0], batch_number): + pixels_in = (2. * pixel_samples[x:x+batch_number] - 1.).to(self.vae_dtype).to(self.device) + samples[x:x+batch_number] = self.first_stage_model.encode(pixels_in).to(self.output_device).float() + + except model_management.OOM_EXCEPTION as e: + print("Warning: Ran out of memory when regular VAE encoding, retrying with tiled VAE encoding.") + samples = self.encode_tiled_(pixel_samples) + + return samples + + def encode_tiled(self, pixel_samples, tile_x=512, tile_y=512, overlap = 64): + model_management.load_model_gpu(self.patcher) + pixel_samples = pixel_samples.movedim(-1,1) + samples = self.encode_tiled_(pixel_samples, tile_x=tile_x, tile_y=tile_y, overlap=overlap) + return samples + + def get_sd(self): + return self.first_stage_model.state_dict() + +class StyleModel: + def __init__(self, model, device="cpu"): + self.model = model + + def get_cond(self, input): + return self.model(input.last_hidden_state) + + +def load_style_model(ckpt_path): + model_data = ldm_patched.modules.utils.load_torch_file(ckpt_path, safe_load=True) + keys = model_data.keys() + if "style_embedding" in keys: + model = ldm_patched.t2ia.adapter.StyleAdapter(width=1024, context_dim=768, num_head=8, n_layes=3, num_token=8) + else: + raise Exception("invalid style model {}".format(ckpt_path)) + model.load_state_dict(model_data) + return StyleModel(model) + + +def load_clip(ckpt_paths, embedding_directory=None): + clip_data = [] + for p in ckpt_paths: + clip_data.append(ldm_patched.modules.utils.load_torch_file(p, safe_load=True)) + + class EmptyClass: + pass + + for i in range(len(clip_data)): + if "transformer.resblocks.0.ln_1.weight" in clip_data[i]: + clip_data[i] = ldm_patched.modules.utils.transformers_convert(clip_data[i], "", "text_model.", 32) + + clip_target = EmptyClass() + clip_target.params = {} + if len(clip_data) == 1: + if "text_model.encoder.layers.30.mlp.fc1.weight" in clip_data[0]: + clip_target.clip = sdxl_clip.SDXLRefinerClipModel + clip_target.tokenizer = sdxl_clip.SDXLTokenizer + elif "text_model.encoder.layers.22.mlp.fc1.weight" in clip_data[0]: + clip_target.clip = sd2_clip.SD2ClipModel + clip_target.tokenizer = sd2_clip.SD2Tokenizer + else: + clip_target.clip = sd1_clip.SD1ClipModel + clip_target.tokenizer = sd1_clip.SD1Tokenizer + else: + clip_target.clip = sdxl_clip.SDXLClipModel + clip_target.tokenizer = sdxl_clip.SDXLTokenizer + + clip = CLIP(clip_target, embedding_directory=embedding_directory) + for c in clip_data: + m, u = clip.load_sd(c) + if len(m) > 0: + print("clip missing:", m) + + if len(u) > 0: + print("clip unexpected:", u) + return clip + +def load_gligen(ckpt_path): + data = ldm_patched.modules.utils.load_torch_file(ckpt_path, safe_load=True) + model = gligen.load_gligen(data) + if 
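Worth noting from `encode`/`decode` above: images live in [0, 1] at the API boundary while the autoencoder itself works in [-1, 1], hence the `2*x - 1` on the way in and the clamped `(x + 1) / 2` on the way out. A round-trip sketch in torch (torch is already a dependency of this module):

import torch

x = torch.rand(1, 3, 8, 8)                 # image in [0, 1]
vae_domain = 2.0 * x - 1.0                 # what encode() feeds the model
restored = torch.clamp((vae_domain + 1.0) / 2.0, min=0.0, max=1.0)
assert torch.allclose(x, restored, atol=1e-6)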
model_management.should_use_fp16(): + model = model.half() + return ldm_patched.modules.model_patcher.ModelPatcher(model, load_device=model_management.get_torch_device(), offload_device=model_management.unet_offload_device()) + +def load_checkpoint(config_path=None, ckpt_path=None, output_vae=True, output_clip=True, embedding_directory=None, state_dict=None, config=None): + #TODO: this function is a mess and should be removed eventually + if config is None: + with open(config_path, 'r') as stream: + config = yaml.safe_load(stream) + model_config_params = config['model']['params'] + clip_config = model_config_params['cond_stage_config'] + scale_factor = model_config_params['scale_factor'] + vae_config = model_config_params['first_stage_config'] + + fp16 = False + if "unet_config" in model_config_params: + if "params" in model_config_params["unet_config"]: + unet_config = model_config_params["unet_config"]["params"] + if "use_fp16" in unet_config: + fp16 = unet_config.pop("use_fp16") + if fp16: + unet_config["dtype"] = torch.float16 + + noise_aug_config = None + if "noise_aug_config" in model_config_params: + noise_aug_config = model_config_params["noise_aug_config"] + + model_type = model_base.ModelType.EPS + + if "parameterization" in model_config_params: + if model_config_params["parameterization"] == "v": + model_type = model_base.ModelType.V_PREDICTION + + clip = None + vae = None + + class WeightsLoader(torch.nn.Module): + pass + + if state_dict is None: + state_dict = ldm_patched.modules.utils.load_torch_file(ckpt_path) + + class EmptyClass: + pass + + model_config = ldm_patched.modules.supported_models_base.BASE({}) + + from . import latent_formats + model_config.latent_format = latent_formats.SD15(scale_factor=scale_factor) + model_config.unet_config = model_detection.convert_config(unet_config) + + if config['model']["target"].endswith("ImageEmbeddingConditionedLatentDiffusion"): + model = model_base.SD21UNCLIP(model_config, noise_aug_config["params"], model_type=model_type) + else: + model = model_base.BaseModel(model_config, model_type=model_type) + + if config['model']["target"].endswith("LatentInpaintDiffusion"): + model.set_inpaint() + + if fp16: + model = model.half() + + offload_device = model_management.unet_offload_device() + model = model.to(offload_device) + model.load_model_weights(state_dict, "model.diffusion_model.") + + if output_vae: + vae_sd = ldm_patched.modules.utils.state_dict_prefix_replace(state_dict, {"first_stage_model.": ""}, filter_keys=True) + vae = VAE(sd=vae_sd, config=vae_config) + + if output_clip: + w = WeightsLoader() + clip_target = EmptyClass() + clip_target.params = clip_config.get("params", {}) + if clip_config["target"].endswith("FrozenOpenCLIPEmbedder"): + clip_target.clip = sd2_clip.SD2ClipModel + clip_target.tokenizer = sd2_clip.SD2Tokenizer + clip = CLIP(clip_target, embedding_directory=embedding_directory) + w.cond_stage_model = clip.cond_stage_model.clip_h + elif clip_config["target"].endswith("FrozenCLIPEmbedder"): + clip_target.clip = sd1_clip.SD1ClipModel + clip_target.tokenizer = sd1_clip.SD1Tokenizer + clip = CLIP(clip_target, embedding_directory=embedding_directory) + w.cond_stage_model = clip.cond_stage_model.clip_l + load_clip_weights(w, state_dict) + + return (ldm_patched.modules.model_patcher.ModelPatcher(model, load_device=model_management.get_torch_device(), offload_device=offload_device), clip, vae) + +def load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=True, output_clipvision=False, 
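`load_checkpoint` above selects v-prediction purely from the YAML's `parameterization` field, defaulting to epsilon prediction. A compact sketch of that dispatch with an inline config; the `ModelType` enum here is a local stand-in for `model_base.ModelType`:

import yaml
from enum import Enum

class ModelType(Enum):      # stand-in for model_base.ModelType
    EPS = 1
    V_PREDICTION = 2

config = yaml.safe_load("""
model:
  params:
    parameterization: v
""")

params = config["model"]["params"]
model_type = ModelType.EPS
if params.get("parameterization") == "v":
    model_type = ModelType.V_PREDICTION
assert model_type is ModelType.V_PREDICTION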
embedding_directory=None, output_model=True): + sd = ldm_patched.modules.utils.load_torch_file(ckpt_path) + sd_keys = sd.keys() + clip = None + clipvision = None + vae = None + model = None + model_patcher = None + clip_target = None + + parameters = ldm_patched.modules.utils.calculate_parameters(sd, "model.diffusion_model.") + unet_dtype = model_management.unet_dtype(model_params=parameters) + load_device = model_management.get_torch_device() + manual_cast_dtype = model_management.unet_manual_cast(unet_dtype, load_device) + + class WeightsLoader(torch.nn.Module): + pass + + model_config = model_detection.model_config_from_unet(sd, "model.diffusion_model.", unet_dtype) + + if model_config is None: + raise RuntimeError("ERROR: Could not detect model type of: {}".format(ckpt_path)) + + model_config.set_manual_cast(manual_cast_dtype) + + if model_config.clip_vision_prefix is not None: + if output_clipvision: + clipvision = clip_vision.load_clipvision_from_sd(sd, model_config.clip_vision_prefix, True) + + if output_model: + inital_load_device = model_management.unet_inital_load_device(parameters, unet_dtype) + offload_device = model_management.unet_offload_device() + model = model_config.get_model(sd, "model.diffusion_model.", device=inital_load_device) + model.load_model_weights(sd, "model.diffusion_model.") + + if output_vae: + vae_sd = ldm_patched.modules.utils.state_dict_prefix_replace(sd, {"first_stage_model.": ""}, filter_keys=True) + vae_sd = model_config.process_vae_state_dict(vae_sd) + vae = VAE(sd=vae_sd) + + if output_clip: + w = WeightsLoader() + clip_target = model_config.clip_target() + if clip_target is not None: + clip = CLIP(clip_target, embedding_directory=embedding_directory) + w.cond_stage_model = clip.cond_stage_model + sd = model_config.process_clip_state_dict(sd) + load_model_weights(w, sd) + + left_over = sd.keys() + if len(left_over) > 0: + print("left over keys:", left_over) + + if output_model: + model_patcher = ldm_patched.modules.model_patcher.ModelPatcher(model, load_device=load_device, offload_device=model_management.unet_offload_device(), current_device=inital_load_device) + if inital_load_device != torch.device("cpu"): + print("loaded straight to GPU") + model_management.load_model_gpu(model_patcher) + + return (model_patcher, clip, vae, clipvision) + + +def load_unet_state_dict(sd): #load unet in diffusers format + parameters = ldm_patched.modules.utils.calculate_parameters(sd) + unet_dtype = model_management.unet_dtype(model_params=parameters) + load_device = model_management.get_torch_device() + manual_cast_dtype = model_management.unet_manual_cast(unet_dtype, load_device) + + if "input_blocks.0.0.weight" in sd: #ldm + model_config = model_detection.model_config_from_unet(sd, "", unet_dtype) + if model_config is None: + return None + new_sd = sd + + else: #diffusers + model_config = model_detection.model_config_from_diffusers_unet(sd, unet_dtype) + if model_config is None: + return None + + diffusers_keys = ldm_patched.modules.utils.unet_to_diffusers(model_config.unet_config) + + new_sd = {} + for k in diffusers_keys: + if k in sd: + new_sd[diffusers_keys[k]] = sd.pop(k) + else: + print(diffusers_keys[k], k) + offload_device = model_management.unet_offload_device() + model_config.set_manual_cast(manual_cast_dtype) + model = model_config.get_model(new_sd, "") + model = model.to(offload_device) + model.load_model_weights(new_sd, "") + left_over = sd.keys() + if len(left_over) > 0: + print("left over keys in unet:", left_over) + return
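In the diffusers branch of `load_unet_state_dict` above, conversion is just a key walk over the generated `diffusers_keys` mapping, popping matched tensors into the new state dict so that leftovers can be reported. The same move on a toy mapping (key names and values are made up for illustration):

diffusers_keys = {"down_blocks.0.attn.weight": "input_blocks.1.1.weight"}
sd = {"down_blocks.0.attn.weight": 123, "unused.key": 0}

new_sd = {}
for k in diffusers_keys:
    if k in sd:
        new_sd[diffusers_keys[k]] = sd.pop(k)

assert new_sd == {"input_blocks.1.1.weight": 123}
assert list(sd) == ["unused.key"]   # anything left behind gets printed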
ldm_patched.modules.model_patcher.ModelPatcher(model, load_device=load_device, offload_device=offload_device) + +def load_unet(unet_path): + sd = ldm_patched.modules.utils.load_torch_file(unet_path) + model = load_unet_state_dict(sd) + if model is None: + print("ERROR UNSUPPORTED UNET", unet_path) + raise RuntimeError("ERROR: Could not detect model type of: {}".format(unet_path)) + return model + +def save_checkpoint(output_path, model, clip=None, vae=None, clip_vision=None, metadata=None): + clip_sd = None + load_models = [model] + if clip is not None: + load_models.append(clip.load_model()) + clip_sd = clip.get_sd() + + model_management.load_models_gpu(load_models) + clip_vision_sd = clip_vision.get_sd() if clip_vision is not None else None + sd = model.model.state_dict_for_saving(clip_sd, vae.get_sd(), clip_vision_sd) + ldm_patched.modules.utils.save_torch_file(sd, output_path, metadata=metadata) diff --git a/ldm_patched/modules/sd1_clip.py b/ldm_patched/modules/sd1_clip.py new file mode 100644 index 0000000000000000000000000000000000000000..3727fb48242153a7f87b6e6359367b18f19041c5 --- /dev/null +++ b/ldm_patched/modules/sd1_clip.py @@ -0,0 +1,518 @@ +import os + +from transformers import CLIPTokenizer +import ldm_patched.modules.ops +import torch +import traceback +import zipfile +from . import model_management +import ldm_patched.modules.clip_model +import json + +def gen_empty_tokens(special_tokens, length): + start_token = special_tokens.get("start", None) + end_token = special_tokens.get("end", None) + pad_token = special_tokens.get("pad") + output = [] + if start_token is not None: + output.append(start_token) + if end_token is not None: + output.append(end_token) + output += [pad_token] * (length - len(output)) + return output + +class ClipTokenWeightEncoder: + def encode_token_weights(self, token_weight_pairs): + to_encode = list() + max_token_len = 0 + has_weights = False + for x in token_weight_pairs: + tokens = list(map(lambda a: a[0], x)) + max_token_len = max(len(tokens), max_token_len) + has_weights = has_weights or not all(map(lambda a: a[1] == 1.0, x)) + to_encode.append(tokens) + + sections = len(to_encode) + if has_weights or sections == 0: + to_encode.append(gen_empty_tokens(self.special_tokens, max_token_len)) + + out, pooled = self.encode(to_encode) + if pooled is not None: + first_pooled = pooled[0:1].to(model_management.intermediate_device()) + else: + first_pooled = pooled + + output = [] + for k in range(0, sections): + z = out[k:k+1] + if has_weights: + z_empty = out[-1] + for i in range(len(z)): + for j in range(len(z[i])): + weight = token_weight_pairs[k][j][1] + if weight != 1.0: + z[i][j] = (z[i][j] - z_empty[j]) * weight + z_empty[j] + output.append(z) + + if (len(output) == 0): + return out[-1:].to(model_management.intermediate_device()), first_pooled + return torch.cat(output, dim=-2).to(model_management.intermediate_device()), first_pooled + +class SDClipModel(torch.nn.Module, ClipTokenWeightEncoder): + """Uses the CLIP transformer encoder for text (from huggingface)""" + LAYERS = [ + "last", + "pooled", + "hidden" + ] + def __init__(self, version="openai/clip-vit-large-patch14", device="cpu", max_length=77, + freeze=True, layer="last", layer_idx=None, textmodel_json_config=None, dtype=None, model_class=ldm_patched.modules.clip_model.CLIPTextModel, + special_tokens={"start": 49406, "end": 49407, "pad": 49407}, layer_norm_hidden_state=True): # clip-vit-base-patch32 + super().__init__() + assert layer in self.LAYERS + + if textmodel_json_config is None: + 
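The weighting scheme in `ClipTokenWeightEncoder.encode_token_weights` above does not scale embeddings directly; it interpolates between each token's embedding and the empty-prompt embedding at the same position, `z = (z - z_empty) * w + z_empty`. The formula in isolation, with toy vectors:

import torch

z = torch.tensor([1.0, 2.0])        # embedding of a weighted token
z_empty = torch.tensor([0.5, 0.5])  # same position, empty prompt
w = 1.5

weighted = (z - z_empty) * w + z_empty
assert torch.allclose(weighted, torch.tensor([1.25, 2.75]))
# w = 1 leaves the embedding unchanged; w = 0 collapses to the empty prompt.
assert torch.equal((z - z_empty) * 0 + z_empty, z_empty)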
textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "sd1_clip_config.json") + + with open(textmodel_json_config) as f: + config = json.load(f) + + self.transformer = model_class(config, dtype, device, ldm_patched.modules.ops.manual_cast) + self.num_layers = self.transformer.num_layers + + self.max_length = max_length + if freeze: + self.freeze() + self.layer = layer + self.layer_idx = None + self.special_tokens = special_tokens + self.text_projection = torch.nn.Parameter(torch.eye(self.transformer.get_input_embeddings().weight.shape[1])) + self.logit_scale = torch.nn.Parameter(torch.tensor(4.6055)) + self.enable_attention_masks = False + + self.layer_norm_hidden_state = layer_norm_hidden_state + if layer == "hidden": + assert layer_idx is not None + assert abs(layer_idx) < self.num_layers + self.clip_layer(layer_idx) + self.layer_default = (self.layer, self.layer_idx) + + def freeze(self): + self.transformer = self.transformer.eval() + #self.train = disabled_train + for param in self.parameters(): + param.requires_grad = False + + def clip_layer(self, layer_idx): + if abs(layer_idx) > self.num_layers: + self.layer = "last" + else: + self.layer = "hidden" + self.layer_idx = layer_idx + + def reset_clip_layer(self): + self.layer = self.layer_default[0] + self.layer_idx = self.layer_default[1] + + def set_up_textual_embeddings(self, tokens, current_embeds): + out_tokens = [] + next_new_token = token_dict_size = current_embeds.weight.shape[0] - 1 + embedding_weights = [] + + for x in tokens: + tokens_temp = [] + for y in x: + if isinstance(y, int): + if y == token_dict_size: #EOS token + y = -1 + tokens_temp += [y] + else: + if y.shape[0] == current_embeds.weight.shape[1]: + embedding_weights += [y] + tokens_temp += [next_new_token] + next_new_token += 1 + else: + print("WARNING: shape mismatch when trying to apply embedding, embedding will be ignored", y.shape[0], current_embeds.weight.shape[1]) + while len(tokens_temp) < len(x): + tokens_temp += [self.special_tokens["pad"]] + out_tokens += [tokens_temp] + + n = token_dict_size + if len(embedding_weights) > 0: + new_embedding = torch.nn.Embedding(next_new_token + 1, current_embeds.weight.shape[1], device=current_embeds.weight.device, dtype=current_embeds.weight.dtype) + new_embedding.weight[:token_dict_size] = current_embeds.weight[:-1] + for x in embedding_weights: + new_embedding.weight[n] = x + n += 1 + new_embedding.weight[n] = current_embeds.weight[-1] #EOS embedding + self.transformer.set_input_embeddings(new_embedding) + + processed_tokens = [] + for x in out_tokens: + processed_tokens += [list(map(lambda a: n if a == -1 else a, x))] #The EOS token should always be the largest one + + return processed_tokens + + def forward(self, tokens): + backup_embeds = self.transformer.get_input_embeddings() + device = backup_embeds.weight.device + tokens = self.set_up_textual_embeddings(tokens, backup_embeds) + tokens = torch.LongTensor(tokens).to(device) + + attention_mask = None + if self.enable_attention_masks: + attention_mask = torch.zeros_like(tokens) + max_token = self.transformer.get_input_embeddings().weight.shape[0] - 1 + for x in range(attention_mask.shape[0]): + for y in range(attention_mask.shape[1]): + attention_mask[x, y] = 1 + if tokens[x, y] == max_token: + break + + outputs = self.transformer(tokens, attention_mask, intermediate_output=self.layer_idx, final_layer_norm_intermediate=self.layer_norm_hidden_state) + self.transformer.set_input_embeddings(backup_embeds) + + if self.layer == "last": + z = 
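`set_up_textual_embeddings` above grows the token-embedding table when a prompt pulls in textual-inversion vectors, keeping the EOS row as the largest id. A scaled-down torch sketch of that table surgery (toy sizes; the real code also rewrites the prompt's token ids to point at the new rows):

import torch

old = torch.nn.Embedding(4, 2)      # rows 0..2 = vocab, row 3 = EOS
new_vec = torch.ones(2)             # a learned "embedding:foo" vector

new = torch.nn.Embedding(5, 2)
with torch.no_grad():
    new.weight[:3] = old.weight[:-1]   # existing vocab rows
    new.weight[3] = new_vec            # inserted embedding
    new.weight[4] = old.weight[-1]     # EOS stays the largest id

assert torch.equal(new.weight[4], old.weight[-1])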
outputs[0] + else: + z = outputs[1] + + if outputs[2] is not None: + pooled_output = outputs[2].float() + else: + pooled_output = None + + if self.text_projection is not None and pooled_output is not None: + pooled_output = pooled_output.float().to(self.text_projection.device) @ self.text_projection.float() + return z.float(), pooled_output + + def encode(self, tokens): + return self(tokens) + + def load_sd(self, sd): + if "text_projection" in sd: + self.text_projection[:] = sd.pop("text_projection") + if "text_projection.weight" in sd: + self.text_projection[:] = sd.pop("text_projection.weight").transpose(0, 1) + return self.transformer.load_state_dict(sd, strict=False) + +def parse_parentheses(string): + result = [] + current_item = "" + nesting_level = 0 + for char in string: + if char == "(": + if nesting_level == 0: + if current_item: + result.append(current_item) + current_item = "(" + else: + current_item = "(" + else: + current_item += char + nesting_level += 1 + elif char == ")": + nesting_level -= 1 + if nesting_level == 0: + result.append(current_item + ")") + current_item = "" + else: + current_item += char + else: + current_item += char + if current_item: + result.append(current_item) + return result + +def token_weights(string, current_weight): + a = parse_parentheses(string) + out = [] + for x in a: + weight = current_weight + if len(x) >= 2 and x[-1] == ')' and x[0] == '(': + x = x[1:-1] + xx = x.rfind(":") + weight *= 1.1 + if xx > 0: + try: + weight = float(x[xx+1:]) + x = x[:xx] + except: + pass + out += token_weights(x, weight) + else: + out += [(x, current_weight)] + return out + +def escape_important(text): + text = text.replace("\\)", "\0\1") + text = text.replace("\\(", "\0\2") + return text + +def unescape_important(text): + text = text.replace("\0\1", ")") + text = text.replace("\0\2", "(") + return text + +def safe_load_embed_zip(embed_path): + with zipfile.ZipFile(embed_path) as myzip: + names = list(filter(lambda a: "data/" in a, myzip.namelist())) + names.reverse() + for n in names: + with myzip.open(n) as myfile: + data = myfile.read() + number = len(data) // 4 + length_embed = 1024 #sd2.x + if number < 768: + continue + if number % 768 == 0: + length_embed = 768 #sd1.x + num_embeds = number // length_embed + embed = torch.frombuffer(data, dtype=torch.float) + out = embed.reshape((num_embeds, length_embed)).clone() + del embed + return out + +def expand_directory_list(directories): + dirs = set() + for x in directories: + dirs.add(x) + for root, subdir, file in os.walk(x, followlinks=True): + dirs.add(root) + return list(dirs) + +def load_embed(embedding_name, embedding_directory, embedding_size, embed_key=None): + if isinstance(embedding_directory, str): + embedding_directory = [embedding_directory] + + embedding_directory = expand_directory_list(embedding_directory) + + valid_file = None + for embed_dir in embedding_directory: + embed_path = os.path.abspath(os.path.join(embed_dir, embedding_name)) + embed_dir = os.path.abspath(embed_dir) + try: + if os.path.commonpath((embed_dir, embed_path)) != embed_dir: + continue + except: + continue + if not os.path.isfile(embed_path): + extensions = ['.safetensors', '.pt', '.bin'] + for x in extensions: + t = embed_path + x + if os.path.isfile(t): + valid_file = t + break + else: + valid_file = embed_path + if valid_file is not None: + break + + if valid_file is None: + return None + + embed_path = valid_file + + embed_out = None + + try: + if embed_path.lower().endswith(".safetensors"): + import safetensors.torch + 
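The parser above multiplies the weight by 1.1 per parenthesis level unless an explicit `:number` override is present. Assuming the `token_weights` helper defined just above is in scope, its behavior looks like this:

print(token_weights("a (red:1.3) cat", 1.0))
# [('a ', 1.0), ('red', 1.3), (' cat', 1.0)]

print(token_weights("((blue))", 1.0))
# [('blue', 1.2100000000000002)]   -> 1.1 applied once per paren level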
embed = safetensors.torch.load_file(embed_path, device="cpu") + else: + if 'weights_only' in torch.load.__code__.co_varnames: + try: + embed = torch.load(embed_path, weights_only=True, map_location="cpu") + except: + embed_out = safe_load_embed_zip(embed_path) + else: + embed = torch.load(embed_path, map_location="cpu") + except Exception as e: + print(traceback.format_exc()) + print() + print("error loading embedding, skipping loading:", embedding_name) + return None + + if embed_out is None: + if 'string_to_param' in embed: + values = embed['string_to_param'].values() + embed_out = next(iter(values)) + elif isinstance(embed, list): + out_list = [] + for x in range(len(embed)): + for k in embed[x]: + t = embed[x][k] + if t.shape[-1] != embedding_size: + continue + out_list.append(t.reshape(-1, t.shape[-1])) + embed_out = torch.cat(out_list, dim=0) + elif embed_key is not None and embed_key in embed: + embed_out = embed[embed_key] + else: + values = embed.values() + embed_out = next(iter(values)) + return embed_out + +class SDTokenizer: + def __init__(self, tokenizer_path=None, max_length=77, pad_with_end=True, embedding_directory=None, embedding_size=768, embedding_key='clip_l', tokenizer_class=CLIPTokenizer, has_start_token=True, pad_to_max_length=True): + if tokenizer_path is None: + tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "sd1_tokenizer") + self.tokenizer = tokenizer_class.from_pretrained(tokenizer_path) + self.max_length = max_length + + empty = self.tokenizer('')["input_ids"] + if has_start_token: + self.tokens_start = 1 + self.start_token = empty[0] + self.end_token = empty[1] + else: + self.tokens_start = 0 + self.start_token = None + self.end_token = empty[0] + self.pad_with_end = pad_with_end + self.pad_to_max_length = pad_to_max_length + + vocab = self.tokenizer.get_vocab() + self.inv_vocab = {v: k for k, v in vocab.items()} + self.embedding_directory = embedding_directory + self.max_word_length = 8 + self.embedding_identifier = "embedding:" + self.embedding_size = embedding_size + self.embedding_key = embedding_key + + def _try_get_embedding(self, embedding_name:str): + ''' + Takes a potential embedding name and tries to retrieve it. + Returns a Tuple consisting of the embedding and any leftover string, embedding can be None. + ''' + embed = load_embed(embedding_name, self.embedding_directory, self.embedding_size, self.embedding_key) + if embed is None: + stripped = embedding_name.strip(',') + if len(stripped) < len(embedding_name): + embed = load_embed(stripped, self.embedding_directory, self.embedding_size, self.embedding_key) + return (embed, embedding_name[len(stripped):]) + return (embed, "") + + + def tokenize_with_weights(self, text:str, return_word_ids=False): + ''' + Takes a prompt and converts it to a list of (token, weight, word id) elements. + Tokens can both be integer tokens and pre computed CLIP tensors. + Word id values are unique per word and embedding, where the id 0 is reserved for non word tokens. 
+ Returned list has the dimensions NxM where M is the input size of CLIP + ''' + if self.pad_with_end: + pad_token = self.end_token + else: + pad_token = 0 + + text = escape_important(text) + parsed_weights = token_weights(text, 1.0) + + #tokenize words + tokens = [] + for weighted_segment, weight in parsed_weights: + to_tokenize = unescape_important(weighted_segment).replace("\n", " ").split(' ') + to_tokenize = [x for x in to_tokenize if x != ""] + for word in to_tokenize: + #if we find an embedding, deal with the embedding + if word.startswith(self.embedding_identifier) and self.embedding_directory is not None: + embedding_name = word[len(self.embedding_identifier):].strip('\n') + embed, leftover = self._try_get_embedding(embedding_name) + if embed is None: + print(f"warning, embedding:{embedding_name} does not exist, ignoring") + else: + if len(embed.shape) == 1: + tokens.append([(embed, weight)]) + else: + tokens.append([(embed[x], weight) for x in range(embed.shape[0])]) + #if we accidentally have leftover text, continue parsing using leftover, else move on to next word + if leftover != "": + word = leftover + else: + continue + #parse word + tokens.append([(t, weight) for t in self.tokenizer(word)["input_ids"][self.tokens_start:-1]]) + + #reshape token array to CLIP input size + batched_tokens = [] + batch = [] + if self.start_token is not None: + batch.append((self.start_token, 1.0, 0)) + batched_tokens.append(batch) + for i, t_group in enumerate(tokens): + #determine if we're going to try and keep the tokens in a single batch + is_large = len(t_group) >= self.max_word_length + + while len(t_group) > 0: + if len(t_group) + len(batch) > self.max_length - 1: + remaining_length = self.max_length - len(batch) - 1 + #break word in two and add end token + if is_large: + batch.extend([(t,w,i+1) for t,w in t_group[:remaining_length]]) + batch.append((self.end_token, 1.0, 0)) + t_group = t_group[remaining_length:] + #add end token and pad + else: + batch.append((self.end_token, 1.0, 0)) + if self.pad_to_max_length: + batch.extend([(pad_token, 1.0, 0)] * (remaining_length)) + #start new batch + batch = [] + if self.start_token is not None: + batch.append((self.start_token, 1.0, 0)) + batched_tokens.append(batch) + else: + batch.extend([(t,w,i+1) for t,w in t_group]) + t_group = [] + + #fill last batch + batch.append((self.end_token, 1.0, 0)) + if self.pad_to_max_length: + batch.extend([(pad_token, 1.0, 0)] * (self.max_length - len(batch))) + + if not return_word_ids: + batched_tokens = [[(t, w) for t, w,_ in x] for x in batched_tokens] + + return batched_tokens + + + def untokenize(self, token_weight_pair): + return list(map(lambda a: (a, self.inv_vocab[a[0]]), token_weight_pair)) + + +class SD1Tokenizer: + def __init__(self, embedding_directory=None, clip_name="l", tokenizer=SDTokenizer): + self.clip_name = clip_name + self.clip = "clip_{}".format(self.clip_name) + setattr(self, self.clip, tokenizer(embedding_directory=embedding_directory)) + + def tokenize_with_weights(self, text:str, return_word_ids=False): + out = {} + out[self.clip_name] = getattr(self, self.clip).tokenize_with_weights(text, return_word_ids) + return out + + def untokenize(self, token_weight_pair): + return getattr(self, self.clip).untokenize(token_weight_pair) + + +class SD1ClipModel(torch.nn.Module): + def __init__(self, device="cpu", dtype=None, clip_name="l", clip_model=SDClipModel, **kwargs): + super().__init__() + self.clip_name = clip_name + self.clip = "clip_{}".format(self.clip_name) + setattr(self, self.clip, 
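`SD1Tokenizer` and `SD1ClipModel` above share a dispatch idiom: the wrapped object is stored under a computed attribute name (`clip_l`, `clip_g`, ...) and every method forwards through `getattr`, which lets SD2/SDXL variants reuse the class with a different `clip_name`. The idiom in miniature, with `str.upper` as a stand-in for the wrapped tokenizer:

class Wrapper:
    def __init__(self, clip_name="l"):
        self.clip = "clip_{}".format(clip_name)
        setattr(self, self.clip, str.upper)   # stand-in for the tokenizer

    def run(self, text):
        return getattr(self, self.clip)(text)

assert Wrapper().run("sd1") == "SD1"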
clip_model(device=device, dtype=dtype, **kwargs)) + + def clip_layer(self, layer_idx): + getattr(self, self.clip).clip_layer(layer_idx) + + def reset_clip_layer(self): + getattr(self, self.clip).reset_clip_layer() + + def encode_token_weights(self, token_weight_pairs): + token_weight_pairs = token_weight_pairs[self.clip_name] + out, pooled = getattr(self, self.clip).encode_token_weights(token_weight_pairs) + return out, pooled + + def load_sd(self, sd): + return getattr(self, self.clip).load_sd(sd) diff --git a/ldm_patched/modules/sd1_clip_config.json b/ldm_patched/modules/sd1_clip_config.json new file mode 100644 index 0000000000000000000000000000000000000000..0158a1fd52727adf22359238285afafb150f66f2 --- /dev/null +++ b/ldm_patched/modules/sd1_clip_config.json @@ -0,0 +1,25 @@ +{ + "_name_or_path": "openai/clip-vit-large-patch14", + "architectures": [ + "CLIPTextModel" + ], + "attention_dropout": 0.0, + "bos_token_id": 0, + "dropout": 0.0, + "eos_token_id": 2, + "hidden_act": "quick_gelu", + "hidden_size": 768, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "max_position_embeddings": 77, + "model_type": "clip_text_model", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "pad_token_id": 1, + "projection_dim": 768, + "torch_dtype": "float32", + "transformers_version": "4.24.0", + "vocab_size": 49408 +} diff --git a/ldm_patched/modules/sd1_tokenizer/merges.txt b/ldm_patched/modules/sd1_tokenizer/merges.txt new file mode 100644 index 0000000000000000000000000000000000000000..76e821f1b6f0a9709293c3b6b51ed90980b3166b --- /dev/null +++ b/ldm_patched/modules/sd1_tokenizer/merges.txt @@ -0,0 +1,48895 @@ +#version: 0.2 +i n +t h
[... 48,892 further BPE merge-rule lines omitted; per the hunk header the file is 48,895 lines long, the remainder being the standard CLIP tokenizer merge table shipped unchanged with this patch ...]
+di splay +s w +a qu +lou is +by e +li l +e mail +we stern +ger many +ell er +re s +f ant +ment ary +de als +ric hard +jer sey +stren g +ra d +pizz a +mon d +w are +l ac +g i +ar chi +c d +yel low +rec ently +re ach +à ¹ +kitch en +desig ned +tr y +g al +restaur ant +at ure +w w +j as +l ma +ðŁij Į +pa in +av o +min ute +sch ol +ther ap +tic ket +d ry +jap an +diti ons +ter ri +sel ves +happ en +t up +ma g +cop y +sh er +free dom +f ile +speci ally +tor onto +lo ad +g ary +re y +answ er +lo y +cau ght +pri ze +u ne +fic ation +ni ger +sy d +tou ch +feat ure +jaz z +recor ds +him self +di sh +ro ber +spot ted +ma ster +wa ve +fin als +bu ll +for um +al d +re comm +ch a +a e +d oo +inst ru +tru ly +l g +in k +bro thers +de st +j im +m it +clo sed +is on +tri ed +s anta +af fe +w an +hor se +g row +camp us +rel ation +nati ve +jour n +go v +o ct +k it +b ound +part ner +re ma +crow d +! ) +c alls +ra il +qu ali +solu tion +con test +con vers +sn ap +b ase +in iti +ta x +y e +ent repre +it or +constru ction +foo d +present ed +n ings +cli mate +k m +mo del +b j +blo ck +present ation +dre am +fi x +c alling +bus ine +con gress +under stand +we b +val ue +ï¸ı âĥ£ +mex ico +it ely +ki m +char ity +ref lec +bl an +fl ying +anal y +famil ies +b and +reci pe +celebr ation +ac cep +ar y +to t +g b +intere sted +cap tain +âĻ ¥ +ti p +ab sol +bra z +inve stig +o logy +de c +tru ck +ver ing +c lear +don t +go tta +ad vis +beg ins +ma ss +de scri +blo ck +k im +davi d +son gs +memor ial +feat ures +su stain +' . +gra b +jo se +v a +con serv +se ts +man chester +fi ghting +de gre +ag a +in d +sle ep +pos ition +ha ir +sig ns +pol icy +it o +al ert +st am +sp end +w y +absol ut +d m +anim al +my ster +success ful +proble ms +ro bo +k ay +gar den +p d +may or +d ale +t ol +off ers +vis iting +friend ly +tre es +offic er +accoun t +ke vin +ðŁij į +gi ant +contin u +con su +tr act +n fl +ðŁĺ Ĭ +h q +b ility +a ar +dis ney +te en +on ed +wh ite +tra iler +de dic +al one +absolut ely +dig ital +willi am +in ation +s wa +e e +enti re +ger man +ro ll +h its +co st +st ay +th a +ali ve +accor ding +co t +liter ally +her it +re ti +haha ha +exper i +li kes +g t +ste el +__ __ +ch air +christi an +to wer +diffe rence +m d +tre ss +mi d +prin ce +afric an +fe der +foo t +car ri +ser ved +r ice +sh all +feat ured +ck er +rec ru +po e +sen se +ni fic +com edy +cont ent +f at +po sted +con tribu +tim ate +li ver +mb le +inter net +ag e +europe an +cl ing +gla d +ff ic +sc o +ak es +el le +ter min +ton y +p ale +col our +seri ous +pat ri +movi es +b m +professi onal +ad o +al u +br inging +f alls +isra el +ter m +langu age +bro ok +man n +commun ic +can not +ac ti +p he +y an +entrepre ne +tur key +log ical +lon g +ar m +ur s +work ers +ing ly +gg s +ri c +tu al +recei ve +op ens +ge ar +soci al +fe et +c king +ad ver +fin an +fe els +sp la +h r +ea ster +bra in +ã ģ +fi g +le dge +ne arly +prote ct +ma ssive +e th +aw a +ðŁĺ ģ +y rs +aware ness +defin itely +k n +imag ine +k u +syste ms +ðŁij ı +f as +li k +provi de +am o +disco ver +inf lu +ma ker +g az +fit ness +stre et +er s +te d +w c +ys is +pos itive +hel ped +que st +andre w +bra d +b in +hang ing +l ing +bri ght +se ction +ma ss +ðŁĻ Į +follow ers +ho sting +tem por +fla g +a ve +let ter +k ur +re qui +of ten +cry p +su ff +âļ ½ +russi an +treat ment +al le +ha y +l an +keep ing +hol y +power ful +pre dic +fun d +e specially +windo w +je wel +il y +ðŁĴ ľ +gener ation +app a +seri ously +o d +ðŁĺĤðŁĺĤ ðŁĺĤ +cer ti +iri sh +ðŁij Į +mi ami +be th +v ity +se cu 
+che f +cri me +graph y +ma x +arti sts +re volu +gu ard +spee ch +u c +upd ates +fac es +st ant +chang ed +repor ts +low er +pe ar +n c +k il +loo ked +spe aker +s f +re spect +ok ay +oce an +s itting +architec ture +tra il +se at +i ra +le g +japan ese +d am +u lar +sw im +polit ics +finan cial +ol d +mou th +at temp +de stin +fi shing +atten tion +me m +chang es +deci ded +reli gi +g in +c av +z z +ad am +ma c +wr ite +beg in +sc ul +al ter +is s +ath on +imag es +m oo +jo ined +ðŁĺ ī +âŀ ¡ï¸ı +pas sed +mu sli +h ir +lar gest +cam er +com ic +gh ted +rug by +bur gh +gg ing +te sting +pre par +lau gh +al ed +impro ve +beli ev +adv ice +sha res +he art +tur ning +s b +t el +caf e +n es +dani el +pat ter +t z +se tt +par k +c and +st ick +happ ens +bri an +ne west +e pic +ad or +ki es +war ning +anim als +custo m +ar c +di an +gol d +cor e +t f +c ity +pan ts +re ality +con fi +in ju +fo x +gu il +k new +âĺ º +cor rec +itu de +d den +. # +re duc +pas s +f on +y a +ow ner +re turns +n c +e ast +ap ol +in sur +th o +si m +juni or +be e +ang el +att le +elec tric +hor ror +cra sh +e ye +pat h +sou thern +emplo ye +ge o +t an +ha z +r ally +ðŁı » +proper ty +was n +enjo yed +gre y +g as +bre w +nor thern +hol ding +g p +ta ke +ch art +ly n +dr ama +z o +pa id +throw back +cu p +discu ssion +down town +w ill +le w +b is +t ary +bre ad +up on +r ate +teach ers +it ation +anc ed +cy cle +choo se +d c +ir an +co w +da ve +ra ise +prin cess +fa ith +- > +indu stri +sp ain +guit ar +fac ts +m n +sp en +cour te +go tt +projec ts +au di +o sc +pe ter +s and +intere st +happ iness +ven ue +sol di +surpri se +poten tial +per io +custom er +i i +g ni +manu fac +e co +bro ken +sing er +vel s +wal es +hu s +in j +f our +tal ent +d ying +mat the +fil m +jo ining +s ell +j ar +lma o +sur ger +bb c +sour ces +au stin +ni k +char les +f am +prin ci +ange l +cas h +lo t +o red +pla ys +pl ate +don e +memor y +br ings +n ba +solu tions +teach ing +gr ace +cir cu +hel ps +foun der +mar y +expl ore +de cor +par ts +ch o +inte gr +ha u +is es +pu tting +in er +r it +v y +mic hel +blu es +every day +for ms +bi o +ye ar +p in +t ter +spr ing +) ) +po t +al ing +perform ing +sh an +plan et +mus ical +head s +it alian +stru gg +âĢį âĻ +w ings +pu mp +h h +tr ou +a id +pri me +ear th +pa int +mon t +am y +bb c +fab ulous +fru it +andro id +bour ne +cere mony +enti al +? ? +deb ate +on ing +dra ft +sol ar +t x +j am +cor n +!! !!! +bro o +mil k +po sed +o hi +mo vement +b ren +part ner +p g +et te +ar ies +sh out +n g +leav ing +t ells +sen s +ta ste +kel ly +wor l +gy m +ric h +e gy +pi d +ma s +â Ĥ +courte sy +fran k +incre ase +wr itten +pp ers +re l +ha i +s as +s ound +tt i +w ich +ri ver +.. ." +a g +fel low +ro me +sm all +gen cy +ic an +lux ury +pro of +me t +wild life +mom ents +ra ther +cor ner +com pe +canadi an +lik ely +therap y +li am +econom ic +indi e +rou te +fi ght +ho pe +se tting +ant ly +cro ss +fant asy +de e +sket ch +comp li +ym i +ru les +engine ering +fig ure +ro w +. 
, +f w +syd ney +w ou +t ation +dre w +us es +the re +sp read +struc ture +pat rick +appa rently +ro s +h ills +w we +ann y +com mission +di v +f ying +con sul +anal ysis +ex i +ten nis +vehic le +ðŁĺŃ ðŁĺŃ +as s +high ly +op ened +b ann +ðŁĴ Ļ +mp h +wi shing +v or +fi f +give away +r r +ra y +je ss +g at +ic ymi +x it +high est +yor k +pi e +invol ved +high er +ri e +mal ay +int elli +desp ite +che e +sar ah +be an +reco gni +ar sen +tal ented +pas sion +ic h +ab c +lead s +dise ase +v is +se c +pre senting +m illi +hol e +sho ts +de part +surger y +gov t +b in +du al +e vi +lon ger +ev ol +scre en +portra it +et c +lo se +ch at +p en +p i +om a +s ick +er c +compan ies +en try +plan e +gr y +ven e +liver pool +premi ere +sha red +a red +fil ms +ir a +holi days +cric ket +ici an +v ing +. ) +ul timate +di vision +con duc +se pt +for ces +mon t +s mart +disa pp +sun shine +in d +b less +ma de +col ors +fran k +ir on +bott le +s go +m ood +j ason +er ic +bir th +te en +respon se +tar get +state ment +fe ar +th el +al um +ar ab +bl in +direc tion +ste ps +er ial +wor ked +at l +ðŁĴ ķ +fel t +pol i +scen es +hom es +b ell +e at +ate ful +t in +l ace +fol ks +p se +an n +wis dom +fa v +but ter +s r +are as +sm oo +bi z +dg es +app o +mo re +the m +effe ct +windo ws +sun ny +cap ital +tot ally +c ities +gr ant +mb ers +s low +au tu +il ities +w ro +ri sing +st ics +viol ence +i gh +qu ot +h it +t c +herit age +bu ff +ne s +z ar +den tial +ex ac +ed ge +de ep +aren a +be came +benef its +mar ks +mb er +a z +am es +pre ci +dra gon +re g +d ings +do s +ðŁĴ ª +n el +s ity +me al +di st +leg end +pur chase +pic al +st ick +f at +du ba +profe ss +car to +pro f +coun tries +respon si +se qu +fa b +tribu te +hon ored +prac tic +pur ple +an ton +pa red +t ough +summ er +environ ment +s ons +ðŁĻ ı +m ps +gi es +her oes +t elling +hen ry +f en +know ledge +Ģ ï¸ı +f r +ne g +u re +ac king +hear ts +s oo +hol lywood +ju mp +sau ce +schedu le +tur n +yo ga +cre ating +c ket +cre ek +â Ń +custom ers +ma dri +gu l +asse mb +moun t +c ell +to p +st al +dav is +t wi +sig n +premi er +iti ons +he aring +un k +pati ents +app ear +heav en +al ty +doc tor +a e +plat form +je ff +ðŁĵ · +regi onal +bi d +box ing +ex ten +or ity +a w +w ise +il le +sever al +bi e +s itu +sy ria +âľ ħ +remin der +enter tain +li on +part ners +in n +ph ar +f au +pl s +expe cted +sug ar +deci sion +s b +ch ron +associ ation +leav es +vis ited +sh ap +ðŁĴ ĸ +fur ther +h ann +w i +run s +l er +fun ding +fil led +.. .... 
+tin y +han g +or g +co ol +se min +ðŁı Ĩ +spon s +nav y +sa int +dru g +d al +r oun +co vered +tra ditional +invest ment +de te +al ism +f low +n is +sun rise +fe at +f ted +we ird +je re +ve gan +medic ine +an o +ac cu +deli very +temp le +chang ing +wil son +phili pp +re fe +n d +is er +g ay +r and +ati ves +t ely +p and +intelli g +g are +am bas +de mon +commit tee +strate gy +refu ge +bud get +prote c +pi er +ex press +nom in +econom y +al low +ic on +gal ax +o h +indi vi +dem and +vir gin +lu ke +ali sts +man i +s mi +ju dge +ent y +mic hi +resul t +am ed +spe aks +' , +hou ston +sh in +b ing +fl y +ch em +au to +v as +ge t +ar m +thank s +d in +gan g +x x +si on +loc ated +p l +jo sh +in fo +jo ins +adver ti +ot d +el d +si e +re asons +v ent +ðŁĩºðŁĩ ¸ +â ł +convers ation +stu di +ðŁĶ¥ ðŁĶ¥ +go s +s ounds +un it +mu sc +ge l +ack ed +pac i +co s +de re +u u +a o +la m +inspir ing +ar ms +tw are +mat ters +ad dic +du de +ex t +cri sis +b ath +me et +sing h +expe ct +del hi +resc ue +wor st +au g +shi pping +ser ving +st o +dar k +ac es +histor ic +landsc ape +desig ner +b illion +gr ateful +wa ke +e ve +m iller +hou sing +dy nam +is co +be ha +sh op +pr ou +e as +a sia +e ding +k on +depart ment +aw ar +mar ine +in ci +photograph er +ta pe +lo go +r ings +d it +-- -- +vin yl +w c +vo ting +se ven +ambas sad +dal las +t u +com ment +k ra +b les +w ag +u d +au dio +stri ke +offici al +o ts +me tho +to ols +ra di +al an +hun t +wat ched +a ke +fa ke +drin king +mer ry +m l +b day +ri o +ni ke +c ant +re pe +co stu +mur der +ak ers +ch ers +ou ts +beg inning +so s +ad es +n in +not es +wro te +sol o +c i +li ghting +ur ban +bre xit +att end +shir ts +pla yo +ac tress +pl ic +stand ard +quot es +par ade +anci ent + © +tur ing +re e +pri mary +fla sh +citi z +mat es +ste in +z i +clin ton +sk in +gen e +hu m +g ar +t le +y i +fo cu +de an +pl ants +cy ber +b u +om e +ho p +ad dress +ti x +gi fts +relation ship +sub scri +fe ed +exac tly +haw ks +ex o +stre ss +s n +arre sted +an e +sof tware +z ero +the me +mu mb +im migr +mi a +make up +ple asure +uni vers +har b +eng ine +ap er +r in +br a +institu te +le ather +al th +sing ing +co s +gh ty +me as +st ic +si de +insur ance +co t +pit ch +moun tains +cri min +su pre +valent ine +at er +wou ldn +sc ale +rel ated +re gar +star tup +pack ed +mi ke +week ly +p ts +coun t +ha r +gott en +min d +ber lin +con ditions +swit ch +cor n +sa ve +g li +emer gency +tun ed +sto ck +discu ssing +every body +s day +whe ther +wrest ling +ec es +gen der +ch en +ðŁij Ģ +madri d +mar athon +e gg +i er +th x +as king +kore a +wol f +ay a +g m +g au +at ory +v r +gra ss +k illing +b ble +ur o +un i +e th +sh ore +th en +re ale +bot tom +ex erc +k ar +or ies +ad ri +san ds +se x +. ' +volunte ers +per form +par liam +inclu de +deli ghted +execu tive +fu el +kis s +ã ħ +char ge +h u +ca kes +ve t +g lu +agre e +pr ices +n au +h l +g ru +ra j +streng th +b ic +sp ending +al es +av en +b last +: ( +yo f +nor mal +si x +qu ick +se a +d aw +mee ts +lo vers +upd ated +po tat +comple ted +coo k +opportun ities +p ure +organ ic +tem per +c am +avo id +par king +duba i +and o +di stri +to y +comple tely +don ald +tri al +bas s +b oun +back ground +v as +mar vel +lu m +ru s +t ool +com missi +throw back +fin ding +is lam +! ? 
+st op +e vil +or al +resi dents +i denti +o ak +ðŁİ ¶ +l il +span ish +chap ter +sto pped +direc t +ho sted +pic ked +lab our +lew is +defen se +à ® +health care +wh is +mat h +pe ak +ra ised +fi x +bu ll +th ir +chel sea +fol k +tr e +can di +pau l +ei ther +ad am +poe try +jewel ry +ðŁ ¦ +pr ay +Ø § +g c +o z +wi shes +fore ign +sun g +lear ned +en e +n ing +micha el +illu stration +legend ary +w av +b au +ðŁļ ¨ +cal end +stre ets +â Ĩ +mon ster +bu ck +g r +scho ol +ba th +wa ste +ne ck +ha wa +be ach +re plac +jec t +on er +fac tory +coun t +ðŁĵ ¸ +mor gan +der ing +se an +steph en +de p +no vel +vide os +ic al +press ure +arsen al +ex pre +ir s +tren ding +ss a +fla sh +re sear +thr ough +profess or +scul p +to s +gg ed +mm a +be e +a pe +hun ter +am i +he i +pla stic +bu cks +uni verse +le gen +niger ia +ple ased +ri s +thin ks +autu mn +i ds +d is +anth ony +ðŁı ½ +ak ed +gla sses +fin ance +z er +k as +con tract +nu mbers +sh aw +partner ship +t il +laun ched +s al +victor ia +theat er +usu al +nam es +perio d +eli za +i th +bar cel +ro cks +bag s +mat e +distri bu +j on +di ffic +ali zed +cur ren +sco red +b ha +du blin +ro se +in ted +soli d +beha vi +wal ker +simp ly +garden s +head ed +in i +ohi o +we ap +f o +gl en +e state +ran dom +th under +thr u +k ill +jac ket +it i +entertain ment +thanks giving +ent al +en coura +el o +a ther +tan k +high lights +f ting +ru le +model s +bor der +bj p +hus band +in done +ken ya +be ars +al o +n inten +pi x +str o +or ders +sal ad +ro ads +n or +l ation +sop hi +ðŁı ¼ +pi eces +b one +min s +inclu des +nu tr +phi l +s ent +fun dra +ga in +bor ough +n ad +mon day +activ ity +it ems +be coming +ken ne +de tro +car di +gue sts +u x +world wide +sever e +new s +thank ful +fic tion +ve ge +m all +si an +er al +inj ury +le e +men u +danc ing +scot ti +exam ple +( # +na i +studi os +ba i +ðŁĴ Ľ +j av +diam ond +vin ce +ric k +prote ction +lin col +cham ps +appro ach +d ar +m ile +clou ds +je ff +in fin +l ers +p les +pe ace +go p +âĻ ¡ +tech n +str a +a verage +ef fort +introduc ing +di versity +austr alian +am p +boo st +s ke +pati ent +appreci ate +ici ans +pu r +f ell +woo ds +illu str +ðŁ ĸ +ag ency +ac tions +brit ain +under way +se attle +el and +ag o +f ill +stre aming +pro test +challeng es +ky o +et sy +coo king +exper t +ru ss +rain bow +commer cial +sp in +be ats +c ry +val u +el i +th row +gr ams +le vels +michi gan +c ad +ador able +const itu +w s +pu b +mid night +th at +net fli +braz il +die go +regu lar +jo y +âĤ ¬ +li qu +ea stern +k ni +fl at +n p +bro wn +w er +se y +tt ers +ac ting +v anc +cy cling +program me +ra w +comple x +tat too +throwback thursday +se ssions +ro oms +si ght +speci es +bom b +lau gh +ke eps +mo on +offic ers +con ver +t r +ha sh +t ack +ri ous +ad ap +a j +reco gn +ex po +sug ge +confir med +rol ling +dre ssing +ic t +fri day +ph ones +ri dge +con cept +ro y +ke ys +ef for +c ate +k ne +ev en +l ay +commun ities +mo d +n az +every where +al ab +bit coin +ban ks +out door +feder al +sto res +h p +c al +m ely +sig nific +be ar +re public +clo ser +al lah +pic k +x d +pal ace +ch ill +b am +er ous +un a +al len +out standing +olym pic +supp ly +fi gu +v au +l p +char lie +un es +> >> +legen ds +ici al +co ast +benef it +mul ti +f its +far mers +am ount +si sters +har ve +hon ey +que en +b ers +pl ann +âŃ IJ +m u +barcel ona +al ber +stat us +re main +ex tra +c andy +vi ous +âľ Į +o v +warri ors +-- > +ju mp +am ar +x mas +stu dies +i ors +k or +don ate +pre p +fi sh +im a +pain ted +ad mini +co splay 
+spor ts +dro ps +fi ghter +evi dence +ðŁĴ ª +la ke +ro b +cine ma +pro file +à ± +stan ds +leg acy +sh ape +ro of +ci vil +i ans +sy l +sh am +vo ted +re tail +ph illi +li sted +du ty +n b +th es +f are +au ction +ffici al +stor ms +d p +l oun +sh ops +al y +ani me +multi ple +ðŁĺį ðŁĺį +psy cho +je an +ap art +candi date +gg y +con f +jose ph +w ick +me at +fr ame +c l +for got +ph y +f ing +li ed +re p +se ed +f all +u fc +nu t +lin d +mo de +fiel ds +en ce +s ley +ðŁ¤ Ķ +ch ill +follow ed +announ ces +cor ru +tro phy +them selves +ac le +al du +k ong +l on +s v +bro ke +ander son +ta i +stor y +tempor ary +activ ities +k ati +ari z +cry stal +spo ke +extre mely +tra ding +ðŁĴ ļ +à ¼ +in ch +ed in +out fit +equ ip +ma di +form ed +be ef +po p +ti ger +this day +ti red +neigh b +re tro +is a +un t +t as +kan sas +de st +secon ds +ta y +hur ric +o u +galax y +dad dy +bro w +bur ger +en ced +de sk +ac cur +secre tary +el ite +k ab +ch in +touri sm +bud dy +ici de +dre ssed +u d +vac ation +che ers +com for +charac ters +j et +bu ying +l ins +n ap +reale state +li e +af c +i ii +f ame +n r +b at +ag ent +ma kers +âĢ ¼ +sec tor +op ti +le on +di et +pra yer +hi p +mi r +le x +br y +an a +pas sing +w en +reco very +ak i +po pul +res ort +mar ia +stu ck +read s +ti er +perfe c +netfli x +p oo +cham p +o c +re duce +we red +comm ents +cla im +acci dent +s ag +h ack +sal t +kin da +k iller +i os +z y +ex change +lec ture +eng er +ic king +t au +reve als +pri son +z om +gh an +u l +jour nal +i ot +tr in +jon a +govern or +cap e +quar ter +spec tive +impre ssive +bab ies +t x +m ill +o y +har ri +jo int +su e +collabor ation +tren d +revolu tion +re new +alum ni +ge tt +sh ell +sun day +ent u +ni c +donald trump +block chain +paci fic +expla ins +sp y +ad voc +par adi +to f +star ring +p av +fe ed +br ac +smo ke +ham p +y am +to kyo +si mon +d h +e ffici +phys ical +n j +ell i +s low +gradu ate +americ ans +ti fy +f red +ap ore +fin ds +rob in +we t +not ice +se mi +un ve +k om +pil ot +scre ening +da ily +ðŁĴ Ĺ +roy al +sp a +vo tes +n ag +wh ate +att ending +exper im +ad dition +k ate +sto l +m ali +foo t +chri st +ch an +de e +lic en +glo bal +mo ore +ti a +bri gh +myster y +y ay +âĿ¤ï¸ı âĿ¤ï¸ı +cre ati +me chan +clo ck +di c +âĢ Ķ +pp er +al ph +through out +al low +re sources +selec tion +ham il +bb q +aa aa +virgin ia +dis ney +en g +so red +drin ks +f ancy +consi der +end a +jan e +hand made +du l +on tari +i us +s ville +color ado +whate ver +whe el +promis e +ne ver +desig ns +ab ly +sex ual +vanc ou +at i +con vention +cul tural +sing apore +pro mo +load ed +gla sgo +pp l +n oo +ke e +ste m +men tion +i do +cru ise +ri ding +be comes +be y +âļ½ ï¸ı +tw in +dedic ated +na sh +de si +work out +jen ni +i v +grou ps +rela x +pho eni +li ft +mix ed +m ck +p c +mu st +me tro +ci es +y ar +a im +ang er +i e +rec y +marri ed +dro pped +eng ag +le st +ambassad or +op h +de s +w ick +assi stant +nat ur +fa il +l td +shor t +k ap +sha w +bi gger +rema ins +crit ical +sur vey +co verage +er son +win d +n b +bil ly +let es +ac ts +jim my +at lan +al and +t c +import ance +dam age +f g +stor age +tw t +bon d +bal ance +cr ying +pu ppy +vo te +pu sh +ðŁĴ ľ +pol y +me l +lon don +terr ori +effec tive +corpor ate +atl anta +jac o +nas a +gre ek +sen ate +i sh +ev a +intellig ence +effor ts +al co +k un +h all +di ag +claim s +fir st +h b +ba e +v ul +pu ll + ° +se par +spe ed +vic ti +on thisday +audi ence +r ates +te ach +fil ming +bu sh +son g +y um +br un +ra ine +aw a +par ks +ð Ŀ +ra bb +ra ch +ra 
id +reach ed +ra il +mo ves +selec ted +fr i +ra ising +om y +st ones +su k +franc isco +cas es +cap it +con fu +w tf +po ke +equip ment +gre g +ess ential +off ering +ne x +pi es +be c +cre ation +chair man +cro wn +w al +john ny +shi ft +ne ck +ban g +bir d +ðŁĺ ı +du ck +re serve +de pu +ma sters +over all +no tic +ju ice +sne ak +che er +cla sses +eag les +n ca +car pet +ci vil +coach es +har ris +u ps +b alls +dec or +mar tin +ro s +v ice +announ cement +who se +ti gers +ste red +c ts +dr am +ste el +youn g +inst all +supp o +recor ding +de ck +se ats +l der +ang le +bo t +sty les +elec tions +for tun +n ab +but ter +ari an +ka sh +in ner +ou red +be ast +we i +ic onic +exper ts +ne cess +b eng +jam es +li a +gre ece +ðŁĵ · +ðŁĺ ģ +good bye +m itch +tw ice +mumb ai +ste am +ru sh +med al +ne tt +fashi on +t ar +r s +sav ing +ric ul +l m +sleep ing +brook lyn +mis s +sen ding +disco vered +sp here +of theday +k icks +missi ons +w right +er n +ght ly +i ous +mel bourne +star tu +mo ved +car ry +d ak +ag ues +bel gi +e ma +way ne +do t +er ie +pe l +it unes +matthe w +no body +est ab +cal m +win ds +lu c +prep are +tren ds +exerc ise +adv ant +ðŁĴ ¯ +athle tics +app s +c tions +adv ance +laun ches +litt le +real donaldtrump +eliza beth +carol ina +hu b +hi dden +n w +us er +pol l +great er +mo st +f ed +p at +life style +s ati +sco res +marri age +l r +aven ue +de serve +ri f +ðŁ Ĺ +wat ch +champion ships +gr ay +en ni +cot ton +g om +whe re +pack age +su m +ab solu +new ly +foo ds +ty ler +assemb ly +musli m +ban k +re memb +op tions +produc er +land o +fun ds +u pper +shad ow +pro gre +co p +ing e +leg s +detro it +hill ary +jo se +gi ants +sou p +sustain able +t us +clo thes +roc king +n z +min ne +mat eri +bru ce +ear t +ca sting +independ ent +thou sands +ta h +de cl +veter ans +li ons +wra p +âĢ ¦ +de ss +bl ing +st ine +e ggs +o on +clo sing +z ay +at t +bac on +fa il +ariz ona +de pre +gho st +new sp +w ers +vi p +li ked +id ent +volunte er +ad ult +pu pp +cir cle +mat erial +degre e +gro wn +boo m +calend ar +su r +vie wing +ath letes +ch and +re ll +asi an +en tr +vol ley +victi ms +bo dy +m ama +trans fer +ge ek +in dic +sav ed +ma i +g ent +it s +loun ge +k ol +the ory +situ ation +is lands +ar th +z oo +floo d +vi ously +show ed +parliam ent +ch ev +el ine +at trac +ab ad +ta il +h rs +lu s +por tu +gor y +provi des +to ys +de ath +in fe +an ce +g le +li am +lo ver +hu d +dv d +reve aled +g w +re ment +ca the +l ying +ra dio +der by +stor s +che mi +hosp it +âľ ¨ +' : +ilo ve +le mon +re public +s ni +ne ss +do or +re action +pre gn +fla v +schol ar +spo tify +is ation +vis ual +aw are +spon sored +jo ke +less ons +leg is +lo ck +si mil +ðŁĺ ĭ +kin d +la y +ma h +ho ping +vancou ver +as er +clean ing +gal a +thre at +la p +ach e +ro mance +ex pen +re post +z am +e pi +mir ror +o ak +ad ul +bat man +s lu +l c +vie wed +re views +d ates +indone sia +acti vi +off en +lea f +i si +ag ricul +costu me +s ites +spir itu +appear ance +ir y +st air +applic ation +spec tac +ic ity +ski es +hand le +pun k +paradi se +t n +de al +provi ding +do c +recei ving +bre w +micro soft +à ¶ +fer r +me tro +th ail +y um +car ter +à ¡ +gent le +bre aks +coo per +show case +cu tting +egy pt +bab y +semin ar +gl ori +ss on +fa ve +re hear +lo tte +la dy +al as +pre p +deli vered +nu clear +ir o +engag ement +at ta +con ven +z an +gl ory +hol ds +busine sses +str ange +sch e +it self +gra d +mar kets +f alling +st ats +ge on +bu dd +li s +she et +thi si +co lo +deser t +regi stration +ig n +expla in 
+inter ior +la ws +writ ers +spr ings +k r +fri ed +blo om +inf ra +a o +cre d +pa st +line up +bo o +bre a +boo ts +celebr ity +att acks +bro ok +ev es +ex cu +cher ry +oo p +fas cin +boy friend +se as +n ine +effec ts +po wered +k ha +ðŁĺ Ģ +sh out +con dition +i j +her o +enter pri +win ter +applic ations +sho e +g el +batt le +pro grams +w art +ðŁĴ ¥ +ra p +ho l +dang erous +di a +coun ter +ric s +i or +k night +co at +emo tional +at ures +d as +whe el +fore cast +tran sport +glasgo w +king dom +prepar ing +im medi +ff in +awar ded +prin ting +ro man +fight ers +any more +bel t +p ine +win e +x i +employe es +logi es +al led +de mo +birth day +ange les +lo g +dri vers +neck lace +k ath +s it +athle te +ef s +s burg +pur pose +resi stance +rele ases +t is +vari ous +deli ver +ch al +s anc +opp o +cra w +neu ro +dr a +suppor ters +sna p +diffic ult +swe ar +logi st +pa th +attemp t +à ¥ +swim ming +ste ve +hur t +inclu ded +b ap +wa re +ðŁĴ ĭ +end ers +ja ke +le eds +cli mb +l b +im ple +li sa +clo thing +ðŁĺ İ +d t +com pla +sw ing +stra w +v als +k le +us ers +stor m +cu ts +ontari o +p an +hand some +i ow +ar gu +chec king +scotti sh +Ķ ï¸ı +si er +em ma +po d +patter n +de sh +en h +ed ward +t ing +k h +hal f +lincol n +mo ther +al leg +r c +volley ball +d n +g ay +all y +le ton +gro ve +l oud +adv anced +re spec +cli ent +supre me +thail and +ho w +gi g +to i +do t +dol lar +ðŁij ĩ +p it +r b +h n +produc ed +gg ers +âĨ Ĵ +ml b +can vas +fin eart +us d +in the +p son +actu al +s l +t b +ip ad +en sure +u mb +w d +sk a +mar s +k end +f eli +th ing +count down +absolu te +r out +dra l +p y +inju red +min t +hun ting +mm er +s age +li gh +ac ity +ex pan +mur ray +ar o +sec ure +four th +eag le +reli ef +st akes +industri al +clar k +under standing +see m +pl enty +sil ver +cla u +thre at +sa il +pro duce +ab str +is is +b r +eng ers +wor ry +bie ber +s j +just in +reali ze +ky le +esp n +fil ter +s ch +ty pes +game dev +d ing +twit ter +soldi ers +p om +car bon +y ards +child hood +ri ed +ke l +ele ph +t ons +key note +qui et +wi re +po sting +is sa +repre senting +bac ks +alex ander +celebr ates +ta ining +| | +ch or +esc ape +pe ek +ti ves +fiel d +ssi e +im pac +spons or +r c +we dd +cann ab +si des +trac ks +com par +con trac +techn ical +bi ble +expl oring +sh are +tra v +n ate +ill o +sc ru +m ingham +gun s +of the +sh ame +se es +ca tho +ac cess +ce l +repor ted + » +mari o +p ad +hope fully +ou se +y on +disapp o +ol o +p itt +pa c +ga p +cru sh +s g +k le +ge m +emp ire +dir ty +a is +avi ation +ze aland +fac ing +high way +d anny +spi der +ot ta +ðŁĺ Ħ +w y +col ours +in fl +co sts +olym pics +au s +h m +ho ward +pas ses +lau ren +mu sh +op in +r ho +disc ount +oper ation +em ily +mm m +cham ber +d il +to yo +shi p +sam u +pic tured +un ic +po l +keep er +carto on +st en +ig nor +n ations +n l +ta sting +deta il +offici als +mo tor +franc is +ed itor +ðŁij ĩ +pe ts +rang ers +t g +r n +w ri +nic hol +i se +spo ts +ani e +chec k +tri ple +ku mar +spe akers +ic ing +pre pared +ab use +friend ship +mon th +swi m +air e +sc ent +hamil ton +indi an +j es +yum my +te ars +da wn +i zed +worl ds +ðŁ ķ +b illi +st one +n hs +ba sic +p or +st le +ir on +ol der +cle vel +e ing +ðŁĺįðŁĺį ðŁĺį +prin ts +fir m +air craft +fin est +devel op +aar on +t z +gra ham +own ers +fo li +less on +qu es +bab e +cra ft +ph en +ju n +bir mingham +v ine +ll er +i an +fineart america +evol u +st ab +im per +war d +com ic +wi z +inv ited +du ke +mat ch +por ts +ro ger +diag no +ke pt +te st +vis u +r hy 
+so c +to x +b aker +sur face +co vers +man s +b its +x box +ff le +n an +gar d +h art +wat ers +v illa +re tro +light ning +catho lic +democr acy +neigh bor +pen n +cr an +jona than +la ura +vi bes +su b +coach ing +clear ly +uk raine +bra ve +commit ment +t all +mar t +ra p +mo di +sco tt +bro s +show er +ðŁı ¾ +âĺº ï¸ı +cou sin +appro ach +br e +com pos +hil ari +phil ly +g ad +quick ly +ri an +t m +vir tual +hou ses +k t +phoeni x +w ire +ff y +b unch +anc ing +tal e +snap chat +star ter +h t +k icking +ap art +th y +) ! +blo gger +it z +com fort +ang els +w ash +" : +ar gent +re quest +hon est +mi ghty +bo bby +k g +ro l +thou se +ex po +h c +tab les +mag ical +po sts +de m +n w +or lando +ab er +* ** +ðŁĺ ľ +environ mental +trans formation +mi le +w ic +hir ing +ma ine +bo ar +r ying +ti s +nit ure +twee ted +anton io +opin ion +fin ale +di y +f is +th in +trou ble +le go +fi les +qu art +sp a +curren cy +cli mate +fan art +rail way +sp ace +ban ds +dani el +mo tion +l eng +hol der +oc cu +mar ie +cathe dral +bu zz +bi es +nas car +bm w +bat tery +char lotte +doc tor +zz le +se ven +in san +d dy +st en +lab or +thr illed +se ren +docu mentary +wav es +cer tain +can did +allow ed +ninten do +star wars +ta p +home made +d les +ther ing +bre e +emp ty +pi ano +pos iti +coun try +por k +pu ts +per ry +m atic +spot light +ti st +or ities +we alth +c p +bar bar +commit ted +as sau +pro fit +e ight +hu l +fini shing +run ner +ss o +insp ec +char ged +christ op +lo sing +co al +ho o +ele v +de le +mo ham +don ation +c able +clin ic +j in +manag ed +ter ing +â ¬ +ur ban +depu ty +bb er +bur n +acade mic +o tt +sta ke +it er +sto wn +ack er +advent ures +ad ams +gre g +pro m +vo l +ac qu +con gre +pa int +citiz ens +c all +af ford +v c +as ks +the tic +independ ence +â Ľ +h itting +bl on +fu ture +â ı +in no +gen e +bo ards +di stance +se t +re mem +th al +pre vent +l ang +ob jec +su sp +mat t +in duc +bor o +pi one +re di +vir tu +prin ted +sco pe +shar k +suc ce +a stron +il legal +j ag +c ting +ine e +at o +rob in +nutr ition +b f +du tch +b n +fur niture +for gotten +at ar +ru p +hy per +bran ch +communic ation +degre es +on ia +un cle +promo te +or che +wi i +j s +but ton +ma jor +c bs +bri stol +premi um +ordin ary +e dit +m g +we ed +st even +: ' +gu s +te s +cap tured +dru gs +do w +wr ites +bi shop +whe els +ali zation +disco very +w r +rach el +ne il +hy dr +cu test +entreprene ur +kore an +ore gon +ul ty +perfec tly +suppor ted +histor ical +t wins +ell y +we l +de vil +in come +scienti sts +de leg +h en +on i +ic ed +gi o +cur ry +reve al +e g +buff alo +n ol +op era +camer on +haha haha +j ab +gradu ation +cra ig +r al +i f +organi zation +le ge +g ang +su d +edin burgh +l ack +fli es +g ate +thr ones +q b +the real +e leg +pp in +c les +jam ie +tn am +cryp to +ou l +p ages +a se +roo ts +stu pid +a did +boo t +prote in +s ap +si um +su s +end or +fun ction +don t +en na +ch y +squ e +wor ker +m tv +e a +k an +ðŁĴ ļ +mu s +professi on +t to +oper ations +al lo +c tor +inv ite +sc and +ou th +z im +lin ks +cli ents +sam sung +discu sses +n ell +ul tra +some where +ste wart +ine t +de z +b out +fac tor +ti an +tr ans +jere my +d b +ðŁĩ ¬ +or n +develop ing +spo l +coo per +ma u +rememb ering +tre k +famil y +sen iors +fo ster +att ended +w ing +trans form +ele mentary +hor iz +li sting +malay sia +it ch +warri or +philipp ines +russ ell +m end +initi ative +cre ep +to ps +br iti +a ur +shar p +adverti sing +ug ly +achi ev +materi als +bu g +dev ice +bon us +fac ility +col e +nh l +y 
as +plann ed +pol e +excell ence +tr ick +con fl +r p +achi eve +lo an +swa g +jess ica +ho we +p our +sc u +z oo +r ated +dre sses +re bel +mex ican +co ordin +me ss +atlan tic +t l +osc ar +wal ks +phar mac +investig ation +... # +cc i +eas ily +monday motivation +y ment +au ti +for ced +ar med +colle agues +pap ers +pro per +sha ke +bu c +le an +exhi bit +e vement +co tt +bi z +sp er +k ent +sw an +/ @ +girl friend +haw k +âĺ Ģï¸ı +mon o +ðŁĴ Ľ +stat ue +ðŁĺ ³ +ra s +te eth +preci ous +t ile +p am +swi ft +v ali +no se +dr unk +experi ences +come back +gen ius +wor se +sh ef +ra d +ed it +hon our +au spol +lar ry +h ire +gor don +achi evement +.... .... +su icide +alter native +su p +sur roun +sha ke +ke ith +pe pper +tur k +crimin al +be ck +su m +w alls +cn n +an tic +of fe +col li +win es +high light +hawa ii +emb ar +l fc +ðŁĩ ® +m v +> > +at mo +wor d +car l +shout out +bre wing +ì Ŀ +do f +s ic +hot test +col on +hh h +shu t +low ing +volu me +apart ment +agre ement +de stro +we e +religi ous +iow a +ro d +land ing +re present +ðŁĵ· : +la s +usu ally +h l +c ac +sal v +al ong +laugh ing +be ans +remin ds +pha se +some body +ma sk +ran ked +dest roy +sc i +âĢ¼ ï¸ı +gab ri +le o +ro a +fa iled +si l +refuge es +re vi +r ing +ber ries +coo kies +y y +conserv ation +sh ab +human s +de termin +a in +ni all +as su +mb a +fro m +extre me +vic es +commer ce +ght ful +or dered +suppor ts +re cap +v or +dro pping +correc t +pay ing +mean ing +n j +qui z +" # +busine ss +ðŁĩ® ðŁĩ +indi gen +du st +box es +bl ind +x xx +zz y +ðŁĩ¬ ðŁĩ +ss els +s ant +dd le +hilari ous +desig n +wonder ing +vehic les +k re +ju d +rece ption +par ker +Ã Ń +pri vi +hy dro +sof tball +pol lu +lo cked +ba h +e ar +scri pt +di vi +br ace +geor ge +the ast +bel o +j al +tion ary +dent al +roc ket +pur ch +sh ak +manufac turing +e z +it is +con cep +tb all +ch s +direc ted +pra yers +oo k +phil os +vari ety +che ss +ser ver +g and +bal ti +ðŁĵ ¸ +sel y +cru z +spectac ular +bur ning +re present +i z +t one +mer ce +h ell +bed room +estab li +bo l +com mon +ãĥ » +ab or +kit ty +hei ghts +re pair +willi am +qu ake +alab ama +popul ation +re v +re tt +i sts +n ite +le m +a ha +clevel and +r m +po ver +ob se +mon tre +man ia + ® +con ne +car ni +sh ah +f y +u a +sc or +strugg le +bo b +' ' +appro pri +deci de +ff ed +ca ster +s ort +hun gry +dra g +ا Ù +gr ounds +d w +sli ghtly +car din +dead line +bron ze +web in +bar ry +sil ence +e uro +op tion +ear n +ðŁĴ ĸ +howe ver +na ren +na ils +bath room +v ine +ph d +min ing +gar age +( ) +shou lder +defe at +di r +o v +liber ty +ple as +x on +com pre +a v +j in +ab les +sil ent +fam ili +vis its +di pl +ha bit +milli ons +regar ding +innov ative +sen ator +r ts +v on +k l +wh il +requi red +âĿ Ħ +lu v +presi dential +po cket +hun dre +sho wn +fro zen +to ward +fa st +confi dence +r ough +indivi dual +qu et +ðŁı ½ +dom e +fi fa +engine er +z en +re mix +ðŁĺ ĥ +pl ant +min or +robin son +as y +pul led +cer tain +potat o +( : +pre s +oc ca +w it +it em +si e +d ating +thom pson +own ed +an u +vi e +te dly +good night +ex cept +ðŁĮ Ł +ira q +ki e +ren ces +li p +simil ar +sau di +vi g +arth ur +pic ks +mil an +hon da +ma xi +o g +ste st +ar ch +analy tics +ba sti +pear l +ter ry +hor se +ast ro +ac ce +laun ching +inter national +s no +ta sty +den ver +ir l +pe te +tor n +advant age +var sity +" " +sol e +g c +lan g +demon str +ol ds +un ity +ne ts +insp ire +cre te +nash ville +nel son +e ter +wal k +hy un +m ack +tre as +see king +ra ge +bru sh +ab and +whil st +co con +h ong 
+shel ter +i p +possi bly +so o +it ed +â Ħ +rac es +war ming +qu in +tele vision +mat ches +ra pi +ment al +pal m +jenni fer +rol ls +indi ana +b ars +cat ching +resc u +candid ates +fa re +âł Ģ +se o +vie tnam +alph a +michel le +visi ble +re gre +wn ed +app le +li p +f fe +li z +york shire +ha il +se asons +be gan +m d +k c +la p +fascin ating +hel p +ur y +u ms +nu ts +se m +along side +bri dge +ori al +o ve +world cup +briti sh +comfor table +i ve +hot els +fair s +hor ri +so x +d ining +stre am +bar ri +ss y +w im +ter ms +v u +pe re +l ens +wal ked +r or +l ars +shi eld +dou bt +pro to +cro ssing +me ant +medi um +ad ding +e b +che ap +fun c +pap er +bran ds +ry an +feed back +col lins +un known +tro pical +sand wich +fal len +for mu +selec t +lo ads +answ ers +or i +mag a +d or +du o +ali e +dru m +ur i +de er +sou l +sh ut +âĺ º +sto len +don ated +bu zz +patri ots +ha l +na sty +nomin ated +mon te +ki a +th ri +ing u +te sts +pe tro +ðŁij ij +ho sts +ne st +to pic +pat ch +m my +hu gh +ab ilities +ma the +s miles +g b +ag enda +insi ghts +chi p +ph an +fail ure +dg ers +ha i +signific ant +sho ck +ru ral +gl am +figu res +pot us +o ta +mini stry +appe ars +fe ar +r h +americ an +h att +son y +fi res +e di +n ou +e qui +wh en +univers al +mad ness +i x +sculp ture +b ach +t to +swe den +et a +en to +develop ed +month ly +ma ps +ra h +le d +del ta +sa ints +is lam +ben ch +fif th +v ard +so cks +wel coming +j e +tur ner +v b +ad i +nor way +ad y +hurric ane +por sche +tra dition +ex am +newsp aper +lu ci +a ver +ide al +d na +madi son +ðŁ § +wit ness +ac ou +insi ght +si mon +robo t +sna ke +n bc +ac o +ro ss +sh ment +religi on +ch ann +in su +camp bell +inst alled +we ather +hor ses +ol i +rober t +k az +ðŁı Ģ +veter an +th read +quar ter +ea sier +cap ture +hi pho +law rence +roman tic +pas sion +cl ay +ox ford +th ai +stu dying +fi a +elec ted +most ly +c b +tu mb +âĢįâĻ Ĥ +x l +sh an +fa ster +ev ans +sli de +sh ri +see k +mi es +chemi stry +pump kin +tu m +, , +ro om +fi red +li ps +pres ence +af f +brew ery +arri ve +sw ag +photo graph +pen gu +chi ps +at tor +val ues +accur ate +con temporary +princi pal +cannab is +ari o +any where +gi a +democr ats +buil dings +li ved +ap s +neg ative +m are +bal lo +li on +diam on +loo k +re form +tom my +il la +tre ats +hundre ds +port land +wor thy +ex cep +ar ia +ido l +be er +cd n +y u +aw k +ðŁĩ ¨ +c ells +à ³ +ident ity +dra wn +de vil +f inger +th am +ðŁij Ĭ +ear ned +fin tech +dol ph +twee ting +evolu tion +ðŁĵ į +est im +m vp +n one +ðŁĩºðŁĩ ¸ +toyo ta +au x +mar in +b old +l bs +ste ak +mur phy +it able +lou is +sol ve +pi a +sk ir +ill ino +webin ar +ban ana +lo v +th on +vo ters +afford able +defe ated +lm fa +air lines +super b +any way +deb t +bo red +ver si +me tal +responsi ble +m k +s se +f ay +cau sed +f p +recomm end +pla za +spor ting +alli ance +au stri +n n +t ours +surpri sed +arti f +th under +sur ve +wor e +bri ef +necess ary +z ie +ash ley +dra ke +r t +kni fe +im mun +char ges +a the +bri de +rep ly +g av +broad cast +pu er +brace let +cap acity +harve st +id k +perfor man +d ding +il ers +par a +jam a +pro vince +ch in +id ers +har i +te aser +ch en +re stor +r at +fl at +col om +ðŁĴ ŀ +ðŁĩ¨ ðŁĩ +smoo th +r t +p itch +stay ing +isra eli +t cot +per spective +do ck +open er +lo vel +x o +class room +l ington +go al +kenne dy +sh am +sp aces +mitch ell +home coming +uk i +claim ed +recru it +ing o +mu fc +mon it +g roo +resi dent +per cent +per man +otta wa +int ment +an xi +stand ards +wor ship +sche me +f x 
+pot ter +bi an +athle tic +af gh +s se +sat ell +par ties +âĿ¤ âĿ¤ +infra structure +rela x +mo du +wor n +smo king +y ach +practic es +wc w +am b +dome stic +tay lor +k entu +provi ded +mo di +ve g +" ... +ob serv +ðŁĺ © +be ard +m our +an gry +ðŁĺ ± +startu ps +woo den +di ve +na il +anti que +ro ses +torn ado +m at +^ ^ +su spect +far m +de vices +me ga +tu l +scholar ship +ge e +disa ster +arri val +po in +mar c +kati e +bb ed +fal se +deser ves +ric hard +ju ana +fre y +tion ed +hy bri +r w +sar ah +ach i +c ure +o le +mor ris +ch ic +broad way +la bel +pa k +pover ty +gol f +e red +f u +er ies +be es +alo gue +st el +wire less +je wish +ti de +blo cked +life time +b har +sp lit +am ster +th i +jo shu +br unch +ha ps +s for +oo ps +ka poor +hi king +suppo sed +ro of +re as +tra in +ti ght +tru mp +bas ically +r r +ea red +see ds +entr ance +c p +wi e +son ic +vic tim +he re +e h +ear rings +sal mon +arc tic +an ne +dou gla +corru ption +hann ah +ha sn +vo ices +con ce +att a +fle et +clin ical +democr atic +ton y +st ood +le f +twit ch +a il +honest ly +incre ased +dro me +don na +accep ted +visit ors +ap ar +ad or +p ar +jer ry +ra i +brand on +ab u +!! !!!! +me me +in gh +glori ous +b hu +pu mp +j ol +li ke +fi sher +ma z +ag an +destin ation +play list +le tters +gen u +br ace +celebr ated +bann er +r he +dra gon +ðŁĺ ħ +sig nature +gre y +âľ Ķï¸ı +al ice +be red +ph er +ber n +ca th +ga thering +sc oring +influ ence +sm iling +de pt +lo cal +a x +ac u +reti rement +hon or +her self +chem ical +asse ss +y all +fre qu +appreci ation +ac a +cho ir +cu z +so il +c il +repor ting +u h +enterpri se +gr at +jaco b +ru m +fe e +j ak +sp in +bi kes +phi a +ste re +p is +bloo d +t att +ra ft +war ren +sh eri +back stage +mar sh +hash tag +ther ine +re in +game day +guar an +reci pes +min ds +stron ger +issu ed +bic y +n ak +ment ed +sc ary +u x +pre vious +tt le +th ats +ac tors +u ma +tin a +bun ny +promo tion +u ss +oli ver +montre al +what s +appreci ated +la kes +excu se +kno wing +pri zes +musc le +shad es +sco t +ing redi +electr onic +ju an +comb at +s ri +e h +turk ish +l om +stri kes +pri son +re e +po pe +vi d +ol dest +dol l +sw iss +certi fied +cli p +re turning +lat or +le igh +tt es +wat son +heal ing +el im +per haps +ha ss +k au +d der +mou se +new castle +indigen ous +wel comes +co le +tau ght +no ise +appe ar +jo e +can on +wedne sday +u tah +c tive +dri ven +i v +c ell +stri p +ac c +focu sed +ar rest +sto cks +wo o +â Ĺ +notic ed +shad o +di spla +ter ror +bor ne +secon d +que ens +wo ke +ja il +no tt +cam bridge +har t +se af +fa x +ac cept +âĺ ħ +goo ds +k at +t win +h s +thou sand +s ins +su ite +amp ton +ar n +rele v +ric har +hoo ps +n bc +class ic +p ab +soldi er +de plo +le ans +install ation +cla sh +le ban +ee e +ti re +belo ved +fu sion +travel ing +ne i +coo kie +glo be +phys ics +s q +co l +wol ves +d l +ex it +" - +foo tball +le af +ster ling +hi de +minne so +fresh man +natu re +indi e +supp lies +bri s +iri sh +ink tober +doo dle +ic op +mess ages +adul ts +recor ded +fix ed +ar do +offe red +under ground +dr one +p ine +ma inten +and re +ham mer +s x +r ound +hi ke +bra d +ro me +fu ll +on ey +ro ws +colum bia +archi ves +appro ved +bat ch +illino is +recogn ition +shou ldn +fo g +nca a +ke vin +human ity +al though +pow ers +p ou +s ar +pe st +alco hol +con sci +phil adel +en o +t m +ok la +cate gory +particip ate +accu sed +bri ef +po em +clu bs +consul t +ja b +big data +amster dam +ac ing +certi fic +n u +d at +impro ved +and y +campa ig +pale stin 
+p ace +mo bi +feel ings +wol f +bra in +pro pos +inter active +prin ce +inde x +c is +cha e +peace ful +co vering +ac o +cour ses +mon key +re place +b l +bloo dy +tal es +brigh ton +neighbor hood +g ates +spiritu al +af raid +bre ast +b ones +ðŁij ī +vide o +w au +tou ch +inju ries +car l +ri x +une x +âĢ ¢ +fre d +consi dered +thu si +an ch +on y +u sa +graph ics +ac re +ðŁĺ © +com memor +com mod +go ti +guar dian +star bucks +pre vention +haha haha +admini stration +portu gal +fac ulty +bet a +ul a +al bert +bre ath +er i +le tting +tr ic +ment ation +incredi bly +ten nes +v d +ðŁĻ Ī +ed die +br ick +gr ill +bt w +wat ches +resear chers +t ney +ni e +p as +a ster +vi br +poke mon +ch rome +go at +pitt s +il ly +festi ve +y d +can al +ðŁ Ĩ +fi es +car los +re que +partic i +tra ins +sam ple +temper ature +sym ph +pic king +in door +z ers +playo ffs +____ ____ +ap es +ly rics +islam ic +performan ces +d ick +spar k +se as +hom a +gr ound +disc i +employe e +com mu +alas ka +al an +fe ast +dg ing +ban king +manu el +slow ly +tru cks +mc car +oo o +sc rat +orche stra +indivi du +m x +bre ath +stair s +equ ality +bla ke +loc ations +cocon ut +balti more +aa a +l c +ðŁı Ĩ +har vey +resi st +immigr ation +adid as +fil i +re f +lg bt +mo s +pp i +ken ny +terr or +ban e +apol is +s g +social media +ka i +hon est +as sas +bol lywood +âĢįâĻ Ģï¸ı +ferr ari +hor n +cryp to +bo om +mainten ance +i di +s man +w l +ext ended +in sul +ve s +go sp +tr i +pi g +tar ge +cel er +st ati +sm h +ri dic +appe al +? ) +con clu +cos me +she ep +christop her +en thusi +po lish +me ts +oun ded +sustain ability +creati vity +con crete +ra i +ali en +ble ss +te es +clu b +ro t +bo s +ex ist +perfe ction +lu ck +rock y +expen sive +mean while +happy birthday +pre t +thr iller +ca ve +playo ff +som er +l u +le x +def ence +am writing +home less +pro phe +ch et +past or +ðŁ¤ £ +land er +ww w +Ģ ï¸ı +tic a +! # +o tic +rad ar +po sters +pow der +po li +ha un +tra p +bl in +assau lt +shor ts +re y +sh y +squ ir +rac ist +gar lic +fu r +remo te +sm ell +impre ssed +fing ers +âł Ģ +din o +le ment +s nu +promo ting +str ing +produc tive +b age +ma son +ra z +direc tly +j k +ev al +ðŁij Ĭ +doc tors +co w +ri der +st v +re move +w u +na than +ro d +n r += > +affe cted +inve st +mp tion +g inger +o d +agricul ture +s que +mu g +coun ting +ke e +mag nific +coo k +ani stan +roo t +plac ed +sym po +gh ana +un d +che er +thro wing +secre ts +f illing +opti mi +butter fly +bu bb +ðŁĺ ī +terri ble +d g +sil k +obse ssed +lo u +ai de +sal ute +mon u +philadel phia +scienti fic +i st +u ae +dess ert +bott les +can yon +ðŁĺ Ī +car ib +o ther +w ich +re source +guil ty +un d +le on +e ss +kan e +el e +tra iner +he im +an te +man age +roo kie +tre ated +po ses +rs vp +cau ses +aw ak +je well +le tt +on ics +tit les +cardi ff +g aga +bu mp +use ful +? ! 
+loo se +bb ing +: : +argent ina +de bu +cy cl +wh el +dis gu +j el +k ills +bio logy +ex ter +tra sh +bo dies +tr am +circu it +expe ct +la ds +w ells +sho t +ge e +naren dr +fa stest +b ent +b ills +mar shall +h ats +intro duce +citi zen +im possible +gi b +az z +net working +r ant +thin k +in dy +st ops +f theday +bri an +* * +amo di +dom e +coura ge +pac king +af fairs +g n +si zed +ent ary +pol and +swit zer +afgh anistan +w u +ten der +subscri be +mo sco +att end +republic an +hon ey +âĢ ĭ +si mul +we ster +foo die +or o +midd le +ab t +co pies +ma je +narendr amodi +ty pical +inspir ational +vit am +wis con +cu bs +tiv ity +h ali +e ars +k ay +d are +mari juana +cu rious +an ia +tom ato +re mind +ðŁĩ · +sc ared +cou p +po et +land ed +ri d +wra pped +mor ri +climb ing +e ws +fe eding +con tra +tho logy +gri d +ti vely +read er +la ser +di ving +di g +lat in +ti ed +shake spe +o ci +ad m +show ers +chu ck +mar cus +oo s +kne e +o live +ow l +dy lan +an no +g ym +deci sions +well ness +arri ves +sati s +chri s +thur s +ðŁ¤ £ +inter views +thank you +switzer land +over night +journ alist +ser ves +vol can +.... ... +plo t +nic ol +car rying +mag ne +tre asure +ex p +be ver +ðŁĺ ¢ +mar ty +mo le +don ations +recogni zed +b h +du s +sh ann +al do +success fully +ent e +ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ +cab inet +cu is +tit led +d as +so l +strate gies +deli vering +ad ds +ani an +ne ther +ðŁĴ ĥ +con tain +su its +pa irs +to dd +rel la +ro pe +ci o +cro p +paint ings +su z +re jec +bu st +d h +fra ud +m h +contro l +je al +destroy ed +al lows +wo ol +minneso ta +om en +j u +sympo sium +d af +lim it +accoun ts +load ing +inter n +re solution +hol land +qu al +meet ings +gra ve +cam ping +v am +re nov +liber al +am ber +gre e +hu mb +fe ver +el ing +broo ks +à ² +be th +ad ed +al t +ro e +perform ed +jo sh +frank lin +nic ole +de ss +bb s +m g +net works +min im +al t +weap ons +gu y +jas on +g ha +harb our +at on +pra ise +kentu cky +bel fast +st icks +blo ss +ho pes +an thro +famili ar +wa it +ch ile +depre ssion +la x +je ts +le ice +recei ves +si er +an k +de x +inde ed +fle xi +fab ric +lam b +hel icop +am anda +âĢĶ âĢĶ +compe te +sn ack +techno logies +sy rian +mom s +mu ham +cho sen +an at +dev on +shar ks +re t +fundra iser +selfi es +st ations +communic ations +tennes see +tu tor +ro t +valu able +dynam ic +nur se +i ed +earth quake +deser ved +a ve +sar a +stre tch +dougla s +ne pal +à § +ob viously +d ame +ra pe +any body +k w +pat rol +hol ders +h anna +info graphic +ec o +be ating +stan ley +bo ats +ri bb +e z +wit ch +inv a +ac id +boar ding +- @ +gi l +da ve +care ers +opp os +l loy +in ter +do pe +re su +j agu +sh ade +in dy +on ist +rel ations +ag en +ab le +inci dent +me ter +shar ma +id r +pro ve +immedi ately +tro ops +am an +g low +gaz a +blo cks +person al +chron ic +all er +si d +sh r +whats app +lu cy +ar chae +ho u +journ alism +our selves +go t +the med +shap ed +we ak +cas ual +leng th +sla m +ab bey +e v +coun ter +est a +reci pi +cha pel +expan sion +sel f +suff ering +sp ice +n z +sp art +desp er +boo king +quart ers +y on +ðŁĴ Ĺ +p k +continu ed +- # +man hatt +tal ked +sh en +com bo +hybri d +je ans +liqu id +se al +re tweets +ac celer +collec tive +t as +: )) +profession als +ra w +o tt +su san +ir ing +okla homa +re ven +survi val +cre ator +tran sit +st ac +sur f +i k +ed iting +ch illing +bai ley +ste al +ra ble +pa rent +hun ger +sn app +collec t +philos oph +dedic ation +c f +c m +le ep +repe at +re ha +un fortun +a er +a ero +abstr act +mon itor +ag ents +bu l +sci 
ence +harb or +drag ons +floo ding +ac compli +d ash +juli a +the red +tues day +cy ber +b low +ta ined +le m +refe rence +pp o +ne goti +char le +con nor +au lt +access ories +commissi oner +rain y +re ar +advis ory +luc as +ma id +co al +k av +pol o +ðŁı ¾ +tran sport +mar gare +straw berry +bur ns +gre ens +ne v +partici pants +col in +belgi um +col our +in form +d ell +br on +cal y +kick off +strate gic +re union +hon ors +li b +egy p +âŃIJ ï¸ı +hy po +si zes +regi stered +bet es +relax ing +bloo m +inten se +valent ines +insan e +w wii +p x +tri o +bla de +wiscon sin +con e +plat in +ali ze +ra ven +incre asing +indi ans +il ian +bl u +rabb it +exten sion +je f +au di +fer ry +s ell +a day +us b +swe at +cham pag +metho d +mem ph +assi st +s by +ca pe +remo ved +mag n +v t +r ams +f bi +tack le +phe w +h on +motor cycle +su spec +eleph ant +sub ject +let te +da iry +whe at +awk ward +ac t +tro l +mit ted +zay n +sheri ff +ene my +con s +ke tt +bul ls +ev alu +bt c +satell ite +ho lo +por ter +dia betes +bet ter +rele asing +sur f +: - +se basti +collec ting +en cing +e thi +go ds +al ley +health y +m ills +sma sh +co pper +cr ack +read ers +sp ac +licen se +bas ket +bang la +en tic +om i +m ere +si vely +anim ation +lan es +dent ally +chill in +fi e +k aren +dep th +li pse +n g +ri p +mel o +sand y +ðŁijı ðŁijı +vin cent +nu t +hu g +who le +cre ates +? ??? +âĿ¤ï¸ı âĿ¤ï¸ı +bak ed +up grade +rober ts +har a +carib bean +auth entic +mb s +mosco w +attor ney +wi ki +ch lo +hu ll +cor k +" ! +sty lish +ðŁĵ¸ : +di ary +impro ving +ex pand +bri ght +pollu tion +k nights +person ality +chec ked +fac ilities +z el +bow ling +gu er +ðŁİ Ĥ +on going +un its +hoo k +be ck +confl ict +to dd +far ming +educ ational +k ak +cla y +stro ke +bel ly +explo re +mill enni +th m +loo p +sm s +consi st +cir ca +br yan +d ab +youn ger +soli dar +pp a +experi enced +b ella +bo ard +shef field +steph en +consu mer +sub mit +spon sor +t ang +ag gre +comb ined +trac king +sand ers +b az +survi ve +fer red +equ al +se p +re ed +str ong +priv acy +st ap +un g +ac ry +pa sta +pir ates +ag er +fair y +du p +introduc ed +wi p +let s +spr ay +ðŁĵ º +gre w +a sts +pitts burgh +new york +jo ey +lau ren +tra de +ch op +pi pe +cla ire +behavi or +v ap +cre ws +lap top +ðŁ¤ Ĺ +che ster +disci pl +d f +out doors +k s +go ver +super star +cas ino +far mer +; -) +re turned +ðŁı Ī +ma il +roa sted +co sta +v ill +pe z +gard ening +distribu tion +sh ining +inve stors +ra sp +dec ades +reali zed +bar n +p ti +st able +ut d +pan thers +m ens +b n +ca de +bu cket +yn n +when ever +wa ke +da is +ber nie +lo dge +ju lie +atmo sphere +ðŁĺĺ ðŁĺĺ +major ity +par ti +exc it +cu t +me h +musli ms +be gun +fli ghts +vene ss +ce me +po sing +so le +g ou +dark ness +pe ach +cel tic +auth ority +grand ma +ful ness +smi th +speci fic +gar cia +co ins +good ness +aldu b +recru iting +den nis +gar y +sle eve +weap on +pl z +disco ver +harri son +recruit ment +ja i +ch im +com pared +tom s +mo thers +am y +archi ve +t ask +ben jam +se g +law yer +al um +inve sting +mi e +che z +j p +a ke +fl am +wall paper +âĻ¥ ï¸ı +t ton +che st +favor ites +we igh +coo lest +r ating +relev ant +lo gan +ma ple +run ners +pri or +peop le +ma ur +terrori st +te sted +carni val +su spen +me asure +m v +cyber security +app ren +terror ism +o z +v ital +ni es +gon z +fun ded +twi st +assess ment +die sel +en for +colum n +ad dressing +ca sts +pay ment +x ton +fi er +, ' +la st +ne e +un less +clo se +sk ill +cuis ine +fun eral +ti les +a un +k ru +relation ships 
+ðŁĴ ¯ +ev ent +âĢįâĻĤ ï¸ı +kind ness +pro posed +acou stic +a es +defen der +dan ce +h tt +w at +vo y +ðŁ¤ ĺ +au s +cli ff +sear ching +beauti fully +in qu +at l +speci alist +ðŁIJ ¶ +da i +tra ils +class ics +inst ant +v ous +re venue +mar ch +kir k +fr inge +fire works +tri via +âĺ ħ +tr action +wal ter +mo to +l ily +att itude +cli mb +sc an +sav ings +c w +fa ith +cred its +ab led +gra ff +auto graph +he he +ran ch +ha d +ro gers +ðŁĮ ¹ +f in +re qu +fol k +ad ditional +lyn n +u ber +dol lars +lo gic +wor th +so m +the sis +p ound +bi c +st ur +cer am +spen cer +en tered +v amp +organi zed +âľ Ī +pp s +tr on +merce des +no ti +compet itive +do w +ous ness +vic tor +gr illed +na i +pu tin +ab ra +bl ame +alex and +anim al +dec ent +p ent +inter ior +:' ) +but ler +bal let +ðŁĴ Ķ +albu ms +down s +la d +si r +pla in +p ers +blon de +dis c +paki stan +se ment +ga a +w age +ch as +man i +co ps +terr it +lo l +lau ghter +ri vers +magnific ent +lam p +w b +new sle +char ts +ble ssing +p unch +lon gest +fl oral +cu tie +fare well +sto pping +mb b +bu d +chee se +de cla +si m +mc donald +de ter +you th +t ch +fre der +kin dle +fer n +at or +as leep +p ond +spr int +p ounds +la zy +gh e +fundra ising +dead ly +gran de +dou g +he y +lin da +consi dering +i um +gol den +vi k +auth ors +di ss +u ally +appropri ate +mor ning +y le +hon oring +foli o +be c +re bec +fin land +formu la +corn wall +sh ay +cau sing +bl end +sig nal +t ent +kash mir +nation als +har mony +sc out +acce ssi +he ight +medi eval +impro vement +ke es +prac tical +car d +de par +hu n +om ing +cal gary +ste l +bu bble +gur u +ma h +unex pe +n h +ed a +me at +i ge +si o +god dess +in ches +tun es +br itt +sti on +ra j +âĻ « +mer cy +ðŁĴ ĺ +sen ds +i est +pol ici +val e +reduc ed +as ap +vi jay +defen sive +celebr ations +ri ders +med itation +har mon +g ing + ¡ +program ming +in au +sud den +m h +replac ement +sk u +j ar +gra des +ta st +k itt +brand ing +k aw +boo t +f ought +p ays +g f +iz ation +ho p +k k +activi st +v end +coast al +cha os +ðŁĶ ´ +se me +bill board +li fting +cu mb +sc al +ðŁĸ ¤ +stru ck +l v +indie dev +beat en +jun gle +al right +destin y +m ing +k c +ch ances +om an +q atar +cra f +tra ined +pri x +char m +o tive +s mu +e c +and ers +hand ed +al ban +certain ly +arri ving +i ze +sa i +tr ack +pain ter +hu mble +appo intment +head line +manag ing +mo d +as pe +andre a +à ¤ +ethi op +un ited +exi st +bal i +k ad +n t +d red +re x +recogni ze +tam pa +be ers +ati a +he els +no te +transport ation +tur tle +re de +hipho p +sp icy +sp urs +⬠ĩ +cor p +ther n +to ast +hur ry +proper ties +ma ge +mar co +ele ments +bou ti +syn drome +ms g +develop er +gra ders +he im +re sil +off ices +del ay +di men +vin tag +barbar a +ðŁĺ ± +vene zu +cu lar +fac ed +bar n +ðŁĺ Ĩ +survi vor +wor m +confu sed +passion ate +Ø ± +identi fy +electr icity +sou ls +brad ley +repor tedly +lun ch +shel f +eli a +swee t +smoo th +emplo yment +am el +manhatt an +ste am +oun ts +ye p +li ving +un e +descri be +ca res +man ila +sha wn +ac ted +bas h +st even +re st +pet ition +div ine +wel sh +rac e +platin um +ðŁĮ ¸ +p b +extra ordinary +solidar ity +m all +on ion +schedu led +game of +fer gu +de ms +nor m +p k +tri als +polici es +publi shing +st ole +fron t +charac ter +van ia +ex ce +sti e +sc a +resi dential +sa iling +ðŁĶ¥ðŁĶ¥ ðŁĶ¥ +spons ors +th ick +champag ne +she pher +continu ing +ven ice +per th +na p +a ster +y ak +un limited +cho ices +ne o +hi v +repor ter +bru ssels +f old +dy s +se mi +la wn +it alia +wi fi +as k +em ed 
+fr ame +monit oring +ste ad +i da +gr in +is a +fli p +re stric +offen sive +atta ched +di sh +wh y +philli ps +gre et +p als +mix tape +v ou +fiel der +spar k +alber ta +g len +ca sh +s ri +u ri +ro dri +entreprene urs +climate change +p sy +d le +em ents +lin ked +nether lands +acci dentally +oppos ition +vel vet +ra ys +c w +om o +m f +lmfa o +newsle tter +: ) +toi let +liter ature +di sp +phili p +uni form +sudden ly +head er +cool er +-- - +prou d +bri g +nis san +scienti st +j ah +con centr +pac ks +appo inted +so ap +eng age +cho se +âĻ ¡ +se tup +jeal ous +har ry +g ation +tun nel +te mp +osc ars +dec ade +recomm ended +child ren +ab a +anxi ety +ve ments +sal on +pho too +organi z +mach ines +ab s +vil le +hy pe +ti ff +emer ging +av geek +[ # +contribu tion +bra dy +re sto +g mail +fit z +photo shoot +hel met +h t +eleg ant +ug anda +nur sing +or leans +pen n +na h +foo tage +em a +w o +w ad +concer ns +ve re +re mark +who ever +str ang +p t +qu it +sh ang +histor y +s ick +perman ent +ill ness +col d +visi on +he m +ar row +con vic +pin k +oc cup +bal d +ex hau +u of +am o +on t +ãĥ » +adop t +la id +smo ked +inter pre +ess enti +associ ated +b d +bb y +fi er +inst all +dipl om +con diti +c f +w ak +any a +gr aci +fi sher +s ss +ap r +il it +mus ician +symph ony +cor d +h ack +le gi +l v +bless ings +hum or +sc ra +e ti +min ster +trav elling +bu sh +jewell ery +li me +!! ! +pregn ant +pe e +lo b +cap ital +ip a +pen cil +la bor +duc ks +prou dly +wedd ing +dere k +m w +pe g +valent ine +an gu +re treat +pro spect +dang er +vul ner +up set +, # +sr k +x im +thur sday +n fl +kis ses +re ds +cr ack +re ward +c u +ko k +me te +aband oned +it t +me als +sp ell +stan bul +del ays +ru m +le op +gu m +no va +super man +ch ick +m is +dram atic +inno cent +r ounds +re c +auti sm +bangla desh +mor al +mo vie +sp oo +k la +âĥ £ +ou ting +mess i +ab road +loo kin +a im +q i +st ack +colla ge +à ¯ +hud son +sc an +ho e +ch au +oc cur +comm ander +ho les +ðŁİ Ħ +bi as +v on +stick er +ma k +responsi bility +colum bus +sa int +ed mon +rac ism +far ms +w en +gul f +may o +!!!! !!!! +corpor ation +ba chel +el a +inter nal +je ep +fol lows +di alogue +de rer +smart phone +he len +rich mond +equ ity +s land +b g +ne ar +av i +memph is +we ir +discu ssed +bad ge +p up +mi stake +phen omen +un ite +ðŁ Ľ +de pic +ri des +in augu +n at +sof twitter +comb ination +gosp el +âļ ¾ +ad mission +retro gaming +ðŁIJ ¾ +sch u +mb o +jun ction +al arm +à ¦ +gr ac +kh ali +k ul +m ale +cap tion +wi sh +te re +cor ps +ru bber +play station +er in +effici ent +l or +jo kes +in ary +nor man +lu is +inaugu ral +ch ed +âļ½ ï¸ı +di p +to e +str at +aa c +am u +pi er +co tt +comm and +tt en +sn oo +cu be +clo ses +class ical +s word +expre ssion +reach ing +n app +co st +affe ct +ric o +gi f +brea the +tri be +or tho +h ay +l g +fri es +n m +hi ding +richar ds +en de +mic ro +capit ol +cop y +ro m +regi me +mary land +tax i +di al +embar ra +un believ +ch t +v s +elim in +o dd +pen ny +sound track +l ings +trans ition +rema ining +a is +mali k +? !? 
+rand om +def end +ul tra +tru m +danc er +st ol +dri ve +a ver +ro ast +defin ition +se an +excit ement +partic ul +su rely +sh av +ber y +di shes +com m +is ol +i am +ob li +gho st +hugh es +chi efs +b as +conserv ative +speci al +fe min +sh ri +n ancy +inte l +tu ne +ðŁĩ ª +jo el +gg le +mo to +ðŁĺ Ķ +bu ck +d ag +antic ip +mont ana +gu id +fro g +ec raft +op e +dri ves +nu mer +x y +color ful +wednesday wisdom +illu min +bey on +inau gur +deep ly +pre fer +for tune +coo ked +ti ble +âĺ ķ +swe ater +it ter +tt y +u i +gi e +com plic +~ ~ +tax es +cu ps +di verse +sam anth +âłĢ âłĢ +ba king +sy mp +wa i +be half +mer cur +travel s +ðŁİī ðŁİ +or ia +eng aged +jump ing +reti red +n aked +p uni +speed way +sci ences +rehear sal +on ym +dy ou +pl ates +r ati +kri sh +jaz z +car ol +ra f +pen alty +tim eline +ru by +engine ers +ra f +bel le +do se +che on +esc ap +me g +ran k +or d +me gan +mer ch +ec lipse +âĺº ï¸ı +ple dge +kir k +per si +leice ster +sa k +w k +saf ely +yy y +je t +promis ed +j c +en ne +no ah +re no +re a +ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ +tra il +ðŁij Ģ +f d +soo o +ri min +w k +ภ² +i al +x ox +bis cu +d ale +fan dom +particip ating +fla g +privi lege +pe ach +mach ine +bo ston +gro ss +o g +mir acle +adop tion +u ss +mon sters +be ij +clar ke +pu shing +pra ying +ar o +d n +ell is +apol lo +od ds +refuge e +to w +b p +ðŁĩ¬ðŁĩ § +h end +app eared +memb ership +pe an +du m +viol ent +v y +potat oes +aw w +greet ings +t ts +ac on +sh ane +photograph ed +cra b +temper atures +cu ba +c fc +wel com +he l +in nings +m k +co de +kno ck +gra ss +swe dish +p ta +ick y +v at +lin ing +s q +sa p +ar c +announ cing +sk ins +cit yof +br ing +co x +gam er +it arian +i da +h d +ros se +sad ly +ge o +âļ ¡ï¸ı +tag s +fa ther +chan ge +l ance +whis key +adel aide +te c +stick ers +marke t +class y +bad ass +flo rence +lin er +fro st +k ate +ac on +scand al +es sex +ðŁĺ ı +vi vi +dr ill +blo ggers +recomm end +d ha +ac res +ro ma +bu y +gro cer +er ia +ma har +ff er +patter ns +ver i +com pu +st ev +ang a +ment or +do o +it ali +cdn poli +on ly +conduc t +elec tro +de f +wh ale +prepar ation +bicy cle +vi ral +turn out +bra ss +qu ad +hospit ality +pack aging +den cy +ceme tery +abo ard +dre aming +pic ture +t all +inv ent +ad mi +o e +tem ps +qu an +fun dam +pro mp +resi dence +mu d +sour i +âĦ ¢ +graff iti +gi f +d nd +com p +s war +pe eps +pale stine +devil s +san g +assi stance +bi ke +missi ssi +inter viewed +ne phew +dru ms +v and +gentle men +n sw +inst a +leban on +ee ee +oli via +ver y +rou gh +industri es +m ation +ðŁĺ Ĵ +bar rel +n ay +po ps +moder n +ill y +are st +on ents +protec ting +v ans +e o +vi kings +restaur ants +re ck +jac kie +andre w +w illing +he ath +citiz en +disc rimin +๠Ī +stu art +m ys +hi p +tran sp +" ? 
+te x +su shi +ke d +cro ssed +dist ur +pe dia +f ate +some how +mo th +proce ssing +is s +r in +u ts +yy c +ver t +lg bt +re id +on to +arab ia +habit at += = +stre ak +simp son +addic tion +wim ble +deli vers +challeng ing +ðŁİ ¶ +fran ch +e du +s me +ai ds +hur st +th am +tari an +remem bered +palestin ian +fe es +tru m +sket ch +ur u +fit ting +jes se +ðŁĶ¥ ðŁĶ¥ +---- ---- +ba ch +ici a +colo red +da h +associ ate +int el +s eller +p u +stu ffed +ac s +b s +sh in +cooper ation +certific ate +ab u +ingredi ents +re v +in ge +el der +christi an +bun dle +th ic +dir t +beij ing +comm it +ted dy +ed u +to day +s field +w yn +confir ms +lo o +j v +ene ss +al pha +vir us +ari um +gr ind +bri dges +introduc tion +pol ls +bac ter +z ach +termin al +ra iders +fla vor +zom bie +vo d +sp reading +gameof thrones +effici ency +lat ely +ale m +twee t +cri mes +cl er +de y +dg ed +hy un +pay ments +cir cus +ðŁĺŃ ðŁĺŃ +mis souri +lu b +episo des +c age +po s +mat ching +tumb lr +lin ed +ge st +am bi +nar r +ing ton +regu l +blo wn +is le +co co +on don +joshu a +tour ing +sm a +sau sage +best friend +bo eing +desi re +sav age +ra pper +de vo +te ar +take over +cow boys +po ker +par ag +pp e +h int +we ars +se th +ro les +l anc +man ga +form at +fl yer +c ay +mo or +ba ke +spla sh +v ad +ker ala +proce eds +sil ly +reflec tion +di str +wi d +su it +ci vic +yan kees +by n +migr ation +di stin +or ch +fe mini +quali fying +tu ri +o be +hun dred +cra p +wan g +mathe mat +bu re +expo sure +fergu son +seme ster +re serv +pl ym +a hu +fac ial +wa x +wor ried +ca b +vi o +as a +co d +to pics +p cs +hal o +rescu ed +horiz on +ar k +âļ ª +hol ly +el f +ul ti +pu p +quali fied +attend ance +ati vely +destro y +y c +for th +photoo ftheday +c ents +ic eland +meas ures +de sk +port folio +artic les +direc tors +dat ab +e w +creep y +oun ding +hon oured +mi st +j it +men tioned +port able +iti c +d ann +friday feeling +am id +ti ger +scri p +helicop ter +hard ware +expl or +work place +austri a +beat les +ber nar +spi der +disc o +cul t +lim its +shor tly +fin al +nin ja +lu ke +le bron +wal mart +o il +van illa +shi re +ye g +ak y +c s +bl er +collec ted +t g +rol led +speci als +b ff +pier re +sh im +vi er +flash back +restor ation +individu als +pro d +fre aking +tu rer +o a +re fre +mor oc +gre et +re yn +care ful +our ing +u sh +is d +g ill +vie w +thunder storm +b led +pic nic +guar di +pi g +ar k +syl vania +bann ed +u cl +vi jay +ori um +av engers +believ es +eu r +monu ment +concer ned +la bs +ber g +a ap +vi sh +sing les +can cel +z el +ar ab +ru th +too th +ar ta +sh af +chair s +r ack +dise ases +crow d +cl y +fle x +christ ma +artif icial +tom at +fin e +dra ws +advoc ate +fran ce +Ù Ĭ +ðŁĺ ³ +heav y +s our +compre hen +no ble +aa p +hin du +cor al +g ars +ow en +n l +st all +yel low +mar ina +in ver +suppor t +tou gh +promis es +pi e +master piece +sco re +for ce +mor tg +crypto currency +o x +r ors +rock in +pro vin +ho g +no stal +oak land +pat rick +inclu sion +tra ffic +ah med +a ha +lux ury +con secu +de mon +âĸ º +b lowing +st ag +: " +encoura ge +ben e +sku ll +do dge +bu ster +kin son +wit ne +er ror +lo west +fel low +à ° +sh re +bl ur +vir gin +compos er +sli p +mor nings +ga ins +tab le +gra in +ari st +braz ilian +w we +tu es +ribb on +an ag +di st +sac rif +em brace +entreprene ur +af fili +de o +t ali +touri st +fat al +ì Ĭ +autom atic +ðŁĩ µ +we ak +wel fare +confir m +benjam in +fi ghts +alleg ed +me ad +strugg ling +pro secu +che f +à ¨ +propos al +er n +ðŁĺ Ħ +dy k +on gs +hon g +m 
ack +mel on +on ent +ru sh +d ap +tol er +pro pag +c ze +trans lation +wal let +cott age +sa il +constitu tion +ðŁĴ Ģ +mun ici +fav or +storm hour +i h +ðŁĺ Į +approach ing +pin ned +j ed +niger ian +n ach +sh at +particul arly +mc don +camer as +anni e +admini str +he at +electr ical +char ming +gib son +bouti que +ex posed +ac tor +pil low +beach es +genu ine +margare t +ben nett +lou isi +pos itions +el y +shin y +ten tion +architec t +ren tal +ac qui +goo gle +sub way +mom ent +ðŁļ ¨ +ri m +metho ds +cy cli +nor folk +Ù Ī +over whel +ra pid +we ar +happy birthday +progre ssive +ðŁĴ ¥ +co gn +pap a +f ool +philosoph y +pol ar +jim my +wi g +ðŁĴ ĭ +oper ating +reduc tion +ph i +fla gs +to the +o di +a res +k oo +k ang +ar kansas +ash ton +wimble don +sci fi +attrac tive +mississi ppi +logi sts +ral ph +la bel +gradu ates +ma ha +home town +âľĮ ï¸ı +foun ded +on the +li z +trans l +mini mum +pre sti +ta m +gener ations +re bel +journ alists +par am +mc m +acry lic +death s +tes la +w t +bry ant +jer us +i stanbul +muham mad +ri ley +k ris +work shops +is o +coun ts +stre t +prote cted +trin ity +man ual +r hin +r il +pleas ant +le mon +ner d +har der +dar ren +bur y +ra h +bas is +mi gu +occa sion +li sts +âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ı +e b +de cre +hamp ton +ìĿ ´ +tra vis +trans form +puer to +nh l +av oc +tri ps +unexpe cted +ve t +di dyou +bar ber +st ages +m son +re presented +for t +l al +pp le +nic ely +ignor e +qu il +qu inn +h k +carri er +remin ded +am ong +pass enger +el len +gue z +sc ape +mu ral +youn gest +ma sh +d ill +rout ine +stain less +jack son +gand hi +th al +on ers +edit orial +convers ations +sd ale +autom ation +i ke +า ภ+ðŁĩ ª +hau l +la ying +men tions +am en +abor tion +i bi +coun ties +ca therine +man ds +jam e +roll er +au t +n am +o logical +cep tion +ran king +tox ic +sn acks +victor ian +bang kok +psycho logy +re g +ang ela +respon d +sty le +sophi e +dak ota +achiev ed +mar ked +imper ial +in as +glo ves +sli m +confi dent +att acked +gg er +lon ely +valentine sday +re b +craft beer +orig in +zim bab +ce iling +te ens +other wise +w b +f ers +day sof +advis or +y ah +âĻ ª +en der +republic ans +av a +skir t +pi pel +chi e +jan e +ja x +ðŁĺ ĭ +âľ Ĭ +j ays +bre tt +bal o +cru cial +d har +as is +de au +lloy d +chat ting +âĿĦ ï¸ı +rel ay +remark able +n s +we t +bris bane +ðŁĶ ´ +tion ally +f k +la yer +house hold +consecu tive +es is +pend ant +st ir +crit ic +su gar +photo shop +pa res +arti stic +do dgers +c un +cra fted +am end +bo at +âŃIJ ï¸ı +egyp tian +sa w +tra ge +small er +ox y +pa ired +nex t +i res +tac o +o y +u c +st i +a erial +: // +dr o +dot com +gg ins +r pg +ay e +le an +stri ker +lo bby +prote sts +pri ority +congre ss +am ate +inv it +r ington +mom my +th us +allow ing +pione er +enfor cement +g ori +tal k +dra g +du mb +bul let +san ge +er y +tar gets +ðŁĩ ¦ +he ather +consi der +seaf ood +ve st +ris ks +% . +p g +sac red +he ating +kick ed +tto t +. 
- +chan di +co ven +po ol +pul se +i a +ro ster +shakespe are +es a +car go +pean ut +tro op +ac tion +tab let +home work +cast le +stru ction +mus icians +free zing +bu tt +justin bieber +j j +bah rain +an them +au dit +didyou know +na vig +guid ance +âĸ ¶ +tur f +n un +fic ations +ye men +char ging +x c +bron cos +su bur +p ale +bor ing +among st +for the +em per +om fg +p j +expe cting +ðŁĴ « +st l +ad min +expect ations +sw an +shoo t +oooo o +min ent +ãĢ IJ +wall ace +stan g +satur day +adop ted +dou bles +hom ie +ome z +d han +vent ure +surroun ding +fi le +mob ility +de es +w ski +broo ke +emb ro +re members +kar a +test im +bo tan +m tv +sacrif ice +jerus alem +d l + ´ +proper ly +ili on +as i +leg it +co pe +m cla +recy cling +lar ger +ðŁĴ ĵ +pat ric +gener ous +ja red +p f +mol ly +thom as +ju dges +h b +sor ts +bl vd +o ven +enter ing +plan es +be et +integr ation +boo ked +fre ed +ver n +ash es +to pped +de pot +welcom ed +ren a +m ick +d and +see ks +gam er +ran kings +ren e +mu t +whis ky +fire fighters +gu es +ga ther +tour ney +de men +y ang +new ton +autom otive +back yard +deta iled +mi st +to bac +fi ber +un usual +grat itude +sp are +ne ys +: * +per i +flo ating +fin alist +don ating +dre ss +bro ad +be the +econom ics +tai wan +ed wards +plu g +pra iri +val en +bab a +f ad +an as +har per +dis order +app lied +p att +bi kin +li ver +cu ri +carol ine +ann er +juli an +wal king +mal col +screen shot +co ding +skin care +activi sts +myster ious +ex act +blo cking +mercur y +bat ter +du mp +âľ Į +en se +li sh +ridic ulous +prote sters +ðŁĻ Ī +lu st +swe at +as s +ali ke +co dy +re ments +win ds +as pir +vi enna +pra y +.. .@ +bo i +cand le +assi sts +te e +der son +p ony +f ence +con spir +âĺħ âĺħ +oo th +e pic +ba rely +a unt +b am +diamon ds +end less +scre ens +can cer +gr o +p st +pro spec +mo sque +help ful +ou ri +bro ther +gu jar +cri sti +ine z +to wers +ad dresses +gra y +bur ton +re tweeted +ðŁ¤ Ķ +n ity +du ck +super vis +jo an +kin der +sanc tu +pi ed +âı ° +ł ï¸ı +m ati +reven ge +ce ster +eli fe +desig ners +back ed +bo li +wei ght +cou ch +su res +s its +shri mp +la gos +auth orities +os ity +hol ly +compu ting +fac tors +ab e +pan els +ram ad +sent ence +missi on +hol m +r b +d ads +shang hai +mon ey +she ets +sk ate +thre w +cup cakes +infin ite +l is +practic ing +ess ay +ka i +as ci +mo b +u gh +hol mes +re gg +ik h +mo ck +collec tions +pe p +o va +sal t +nan dez +co y +thre ats +tex ts +cin nam +pregn ancy +pen ding +stam p +flow er +g is +agre ed +pay ne +ro ver +ph ra +sof t +f fin +fa thers +pass engers +aw ays +al a +h es +li van +in s +samu el +ingu i +h of +j j +chen nai +cat al +om ic +he ath +ni ece +pump ed +integr ated +are l +no m +produc tivity +wan ting +vis a +di ana +tw il +it v +cam ps +ro wing +d ley +black and +gu ards +b ells +re verse +vi be +ric ky +mo ss +ny t +âĺ Ģï¸ı +el le +tro y +cu dd +ev an +women s +fo to +mi stakes +wick ed +mi l +c led +me mes +co smo +schol ar +ren o +ðŁĺ Ģ +v ents +# âĢ¦ +terrori sts +ca sey +cardin als +ðŁĺĬ ðŁĺĬ +venezu ela +bol a +liter acy +t w +en o +con tains +au stin +fin anci +ev an +har vard +origin ally +chev ro +her ald +nott ingham +manag ers +âŀ ¡ +accep ting +wal sh +tutor ial +entrepreneur ship +yach t +requi rements +glen n +pe de +unfortun ately +ach ing +dais y +gi an +night mare +âĿ Ĺ +r ina +b art +ema ils +oppo site +who m +sa ke +pu zzle +da shi +par ty +blan ket +bus es +lo re +beau ty +reas on +pun jab +winds or +func tional +exi sting +hel lo +gli mp +con vin +la k +scre aming 
+rebec ca +bli ss +north west +infin ity +cosme tics +pul ling +coffe e +pl ing +op ho +colom bia +interior design +( + +emo tions +sa c +sun glasses +sav es +d f +six th +al y +ðŁĺ » +de en +dev ast +polit icians +lac rosse +g u +pe i +jav a +comb ine +coal ition +er ts +survi v +ch ad +stri an +n n +de vi +coun c +concer n +contro ller +bre ast +j ury +tu m +introduc es +la di +mobi le +al z +ste ady +nur ses +h acking +on line +oce an +ðŁİ Ħ +a am +ju ven +ic c +louisi ana +ar te +street art +is on +wn s +fr m +p anda +no ir +main tain +del ay +symp toms +thor n +ge ome +ter n +carri ed +p ru +pan or +as sy +per u +clou d +sp ra +pe di +e ste +tag ged +ðŁĺ Ŀ +shado ws +naz i +ا٠Ħ +cor ri +âĻ¥ âĻ¥ +j ad +ðŁĩ « +form al +spo ken +ðŁĮ ŀ +enjo y +lo pez +out look +in ho +w ander +Ù ħ +ma ya +pe e +d ine +ãĢ ij +brief ing +suppor ter +ar ily +ght ers +natur ally +doctor who +j en +v ar +new year +re se +si mm +re x +con sequ +tomat oes +bur st +bra vo +bur gers +cr acking +nor theast +bi om +mush room +mar que +dou ble +ni er +v ag +tw enty +key board +win ni +jama ica +par ish +: - +mental health +ali zing +ren der +wa king +ðŁİ Ĥ +g ly +na than +wa shing +mel issa +jun g +loy al +chil i +song writer +guit arist +bo wie +neighb ors +onym ous +as set +ta i +head quarters +ðŁĮ Ī +i hear +ci gare +sur g +) " +re pl +dar ling +ðŁĻ Ħ +z ak +sa re +ãħ ĭ +mic key +ware house +mass age +ine es +did nt +i w +hur ts +eng aging +mag ic +women in +k itten +mor s +c art +tit ans +colle ague +compe ting +er an +k hal +mar ble +dem and +del ight +et ary +bli zz +lou ise +m ls +fini shes +experim ent +conduc ted +electr onics +itt ers +car ing +wh ats +sym bol +jun g +e cu +pi x +con text +char ger +ðŁĺ ĩ +re ig +fra g +ë ĭ +ch ad +tru e +ker ry +def ending +a int +au ton +check out +bar nes +less ly +d t +m me +clou dy +second ary +are z +_ : +app a +const ant +" ) +ve ts +jo b +i ent +ðŁĺŃðŁĺŃ ðŁĺŃ +m j +fren ch +di ver +davi es +hh hh +e book +๠ī +mar iti +bree ze +susp ended +mat o +vi et +ra hu +se i +bol t +en ary +le is +kar l +fr amed +expla ining +ab c +de aling +nat o +ja ke +exp and +leon ard +establi shed +du b +ar men +el led +voc al +nichol as +ori ent +k yo +illustr ated +ah h +danc ers +milli on +ge ta +po pp +as u +mur dered +gi ble +sto ked +gri ffin +maxi mum +adri an +en counter +ther o +david son +ðŁį » +holi day +ev o +asse ts +car son +memor able +âļ ½ +ob am +represent ative +cb d +tr icks +vo gue +vo ice +mm mm +sebasti an +cli f +ath y +par alle +ðŁ¤ · +pa k +ev acu +e ats +ا Ø +tou ched +organ ised +spir its +can ad +gui ded +frame work +ðŁĮ Ł +pe d +natur al +ag ar +replac ed +anch or +ti t +sha h +organ is +super ior +r n +ch ro +eric a +st ill +cor on +chu ck +loc ks +or gan +ro sen +sc am +ben ed +/ # +ke en +tre vor +vamp ire +sor ted +! 
' +af ford +in tro +gr ace +ðŁĺ ľ +sau r +kick starter +influ en +v u +y up +po c +ðŁİ ¥ +a ar +s ang +tre k +et sy +tb h +scre am +chevro let +pix el +shepher d +an or +gabri el +tw ood +sd cc +me ters +develop ers +clo sure +v w +twit ch +ì Ĺ +se oul +pr ice +ho g +n ish +hill ary +scrat ch +in cen +wag on +dis ability +pan ther +ch ats +g d +wit z +sus sex +l ate +den mark +ger ald +cancel led +net te +i x +nav al +bap tist +te t +y ad +ma th +ho y +r andy +po int +intel lec +fru its +w ool +gu in +pr on +the ft +con dem +mar ry +n ola +architec ts +cin cin +roc kets +gentle man +ex plan +t ate +do e +ra ises +wild life +w l +insi der +blan c +w p +for sale +ny c +po well +unbeliev able +pen s +goo dies +mu stang +p ens +st ays +squ ash +xox o +near by +ever ton +co co +le agu +k han +stu d +south west +con struc +s worth +cro atia +le a +su ms +aim s +e an +van ess +iti ous +pa thy +arc ade +b end +sugge sts +sac ram +roy als +ri er +em ir +in cl +an k +clar k +ri ght +vac c +ठ¾ +tan e +li b +u sc +sal es +hu h +s ally +ver a +p ga +gro ws +dru m +tre e +eth ics +sug gest +is ab +se aled +pre viously +anim ated +ab du +ri ses +glo b +pre dat +scar f +del ic +om ar +ll i +sx sw +py thon +ne bra +fun k +reflec t +pav ilion +tic ally +ch asing +bak ery +inva sion +ko h +believ ed +co hen +con qu +cra fts +nat i +cle ver +govern ance +sam ples +fa ils +â Ķ +ti mo +r itu +stri king +inclu sive +sho cking +can t +requi res +dra wings +à¸ Ń +purch ased +du m +z ach +war ner +con sole +man sion +foun tain +circu m +e sh +is land +mil k +pro fits +hali fax +ri val +âľĪ ï¸ı +jen ny +sand ra +ny e +k elly +y al +qu ad +no s +inste in +fin alists +mid fielder +cu e +excep tional +a an +sa pp +gett in +sa a +f ati +sl ice +vol k +s wal +la sting +sum mary +it as +sm o +s z +âĺ Ĩ +ip l +fl ames +ene ws +ha v +hoo die +pitch er +win dy +re vol +centr al +ton ite +ðŁİī ðŁİī +sol ved +mil wau +organiz ations +wee ts +re fin +s th +ãĥ ¼ +el in +ton a +cinnam on +ðŁİ ¨ +ðŁİ ģ +ron aldo +pen insu +ome ga +el ds +desig ning +e igh +blu et +ben z +nu g +ash a +robo ts +su dan +choo sing +en do +ser ge +clo sely +hand y +fing er +be ing +ar te +survi ved +fl ame +mile stone +gu t +d war +fu tures +é e +el o +fri dge +eli c +ou ch +u b +p v +tit an +col lar +st ation +nev ada +aur ora +r d +dun can +âģ ł +bri en +mar sh +Ð ¾ +to tal +ch ry +s ers +su ffe +ra chel +colle ge +to days +cour ts +ch it +re united +gym na +gen esis +be side +re presentation +ch ant +collec tor +ra k +ath ens +ni gh +mun ich +langu ages +fl u +particip ation +__ _ +c v +spec trum +so da +co ver +refe ren +ab bo +ap a +public ation +ed m +mon ica +ar my +ðŁļ Ģ +div or +dr y +stre ams +robo tics +ci der +bull ying +appro val +sto ke +plat forms +sier ra +ex tin +i b +ha yes +succe ed +suff er +at ically +da i +lyn ch +h ound +del ines +ack now +d ated +exclu sively +he res +fac ilit +dam aged +char ter +la kers +fal con +unve iled +wel ove +e ase +pati ence +l one +gent le +gene tic +produc ing +g our +shann on +bil ities +zimbab we +p int +dau ghters +liter ary +bel le +cl am +surroun ded +k any +ne il +pir ate +rang er +hb d +nat alie +bel ong +olym pi +emb assy +sc ol +en er +ak in +lo ren +b h +: / +di va +den im +hi pp +ðŁĩµ ðŁĩ +arn old +? 
' +we ren +em power +dis abled +man or +rasp berry +b af +aw ful +dru mmer +kar dashi +n ash +machine learning +ch u +rebel s +tim ing +mon roe +ton gue +ran ge +pup ils +re ss +amaz on +b z +har ley +pal mer +ballo on +s ings +ic ec +j b +c ers +g ps +whi st +ri se +l t +oo oo +c attle +shoo ter +vod ka +uc l +mt g +le sli +jon as +di spo +at ric +ste in +vintag e +fir ms +flo yd +cow boy +soo oo +is aac +war craft +disney land +beauti ful +be am +franch ise +bu n +k ag +an on +tur bo +swee p +made in +kar achi +dete ctive +penn sylvania +contro versi +vitam in +a side +chron ic +descri bes +remo val +ha h +ap er +ten ed +u to +bad ly +mir ac +f ry +ye a +in jec +ther mal +comp act +th or +te ed +ur gent +l ite +g illi +sop hom +ic o +che m +p m +for k +fre ak +ch ak +recipi ent +i y +ni k +model ing +c ans +ðŁı Ģ +del ux +se am +surviv ors +rad ical +investig ating +reli able +f m +tur t +ligh thouse +to ol +go wn +) ) +bo ts +auto graph +a id +bu ffe +h mm +horri ble +ssi onal +ann i +๠Ģ +k its +sch i +eter nal +hu ss +sens itive +r u +tast es +chec ks +im o +por tion +sk ate +e den +half time +fri ed +ri hanna +ti se +fl ick +ca in +s gt +âľ Ķ +sh au +sta ined +ra ffle +dro ve +sal man +princi ples +sh o +ar u +je ss +gu ine +gar bage +my an +jel ly +dis ru +z ia +q ld +ent ries +la v +fle w +ad mit +objec ts +comp are +ny times +cann es +p n +suff ol +ro c +d ana +e gg +hi st +coun sel +' ! +phy si +imag ination +ad just +explo sion +plym outh +hor ror +elli ott +bour ne +de x +bre ed +au dio +lob ster +disappo inted +nation wide +( ( +incre ases +austr ali +ce dar +star ing +rac ial +e is +g mt +visi ons +stay ed +discu ssions +de an +cur tis +mai den +stel lar +happ iest +h wy +pre season +car av +mon days +hospit als +glimp se +schol ars +ja i +ter race +ann a +goo se +gra ded +lot us +hun g +grocer y +stam ps +emper or +sc oop +in ser +c as +exist ence +he al +fal cons +mar vel +reduc ing +terri fic +magne tic +perfor ms +bar re +p us +tre ating +ic on +w h +decla red +tra uma +do d +come dian +nik on +bu gs +as m +mont gom +ibi za +comprehen sive +ha s +san ti +fellow ship +da sh +p sal +louis ville +sp y +fau lt +d the +fi led +vi sta +de sc +fe ars +you tu +sp s +es p +ri g +cri me +ber ger +wonder land +k ent +in formed +stev ens +my th +ast on +ir i +visit or +at ri +produc ers +al la +person ally +separ ate +agen cies +af ri +il an +spo ke +n ina +squ ad +di ves +de pend +li v +fier ce +enter taining +cha in +sc at +bor ders +pal ette +sp ro +os is +der by +tobac co +zi o +willi e +ju vent +zoo m +hol y +enti rely +af e +mart inez +be ds +pe a +bull dogs +ðŁĩª ðŁĩ +ib m +ne on +ethiop ia +team mates +plan ting +tw er +any time +for bes +ó n +run way +ner vous +ro ger +p ile +ch anc +apo caly +u w +o i +dr ought +territ ory +br ick +cre atures +go in +w aff +gre n +sou theast +je an +am bul +ed ited +stra p +c v +aar on +ãĥ» ãĥ» +t su +descri ption +kin dly +clu tch +im mer +en or +women sday +or ange +ra g +ob vious +hy der +chann els +man go +me yer +ra ining +ge tty +pil gri +coordin ator +up load +ninten do +don uts +san chez +app arel +j r +zz i +, @ +jeff erson +accessi ble +great ly +e id +initi al +budd ha +par is +ma scot +â¬ĩ ï¸ı +sch war +si ri +sp inning +mortg age +e cho +end ange +ge dly +chlo e +enh ance +kar nat +k ry +explo res +ðŁĴ ģ +af fair +ic als +all a +dar t +dolph ins +diffe rences +squir rel +au gh +dr ones +ell en +re store +pa w +un for +pi ke +hil ton +colla b +consu mers +co inci +out comes +pp p +a q +coup on +li est +si ms +k ho +av es +spo on 
+pu dding +cor byn +hat ers +ex ams +sla ve +. ! +p sa +app les +tam il +se d +co ke +zz o +lo sange +car bon +cla ir +... ) +k hu +cra ig +explor ation +sanctu ary +su e +al way +demen tia +won ders +super hero +pakistan i +brown s +bluet ooth +lo cker +mar c +ev entu +delux e +rodri guez +âĿ¤ âĿ¤ +ro bb +ðŁĴ ¦ +lin ux +ten s +intellig ent +se ed +vo ter +s ler +pe aks +inter n +teen age +peninsu la +hand ling +ti e +cou sins +wen dy +me e +à¹Ģ ภ+din o +ðŁĴ ° +ðŁĺ ĥ +ze e +s bury +trage dy +b k +bo re +z in +war ns +idi ot +tou ching +contin ental +tac os +saf ari +wa shed +po dium +morri son +fore sts +c bc +al on +partic ular +be ads +inv ented +lo ch +li ghter +where ver +i de +docu ments +a we +k r +no where +min er +st it +ro x +contribu te +har dy +cl an +ob ject +ca it +ðŁĴķ ðŁĴķ +happ ier +vege tables +t art +g ag +nom inee +heav ily +pan ic +j d +there sa +at m +u ph +s fc +su ri +drin k +n al +re vel +k l +avoc ado +nom ination +ma donna +shar on +malcol m +control led +sh ers +revi val +legis lation +shoo ts +n in +comm entary +pro s +human rights +str anger +mit ch +pipel ine +leg ally +th u +gil bert +tol l +gran ted +gh s +ir anian +refre shing +du k +ab i +pri me +jose ph +mo sa +stati stics +produc tions +mer ry +pat el +sa x +human itarian +struc tures +e missions +town s +fre el +ster ing +rat ings +alle gedly +cab in +st l +w ade +fl yers +tri m +promis ing +z u +bal lot +compar ison +free ze +ou ter +great ness +as sign +snow y +r ale +tor ies +med iter +kno ck +consult ant +cincin nati +analy st +sc oo +je ws +appro xim +pu re +portra its +cy rus +ation al +lo ans +acqu is +el u +accep table +uni on +water color +ru st +batt les +per fu +seas onal +ser ial +mind set +ri ot +fel d +enni al +clo set +pri est +tan ks +int l +scre w +bu m +ab dul +ou x +expla ined +ric a +imag ing +law yers +bu ried +ãĥ»ãĥ» ãĥ» +ear l +âĢ ķ +l ton +resto red +stri pes +fo ss +de mands +ste aling +alex is +mun d +ak er +ur us +war dro +hu gs +gen re +e go +Ù Ħ +particip ated +bab es +ban quet +ti ous +he mi +ds b +lo st +milwau kee +jen ner +ge m +ou tra +lo ses +id i +re ps +ðŁİ § +regu lation +fla w +f ang +vibr ant +ram p +ra ins +well being +so viet +vie wers +de po +libr aries +bi go +ser y +g ill +de struction +co z +c x +bri dal +al ds +plan ted +amate ur +lu d +che ering +show cas +pro file +i u +ver tical +pack ers +wiz ard +ski p +s light +be au +air ways +mu ch +re ra +ðŁĮ Ĭ +ab sor +pati o +pack ages +s ells +ment ally +ðŁĺ ¢ +reyn olds +k are +tri bun +wal t +kn it +ta ste +sur rey +boun ce +cre ature +b are +bet ting +su re +mi ley +laugh s +al ore +cy n +t l +arti st +ann ah +war mer +dynam ics +lunch time +mariti me +vulner able +ðŁĴ ĥ +wol ver +dur ham +const antly +am in +si bl +: @ +bul let +k ach +angel o +wil der +doo m +desk top +law suit +k ca +hen derson +inv iting +bet ty +ta wards +ra fa +le aked +and i +ge ms +af l +vel o +mediter ran +pro be +to tten +steph anie +sn ation +com be +q s +over come +assas sin +ra v +fil ip +winni peg +sh il +determin ed +k as +ou tre +regre t +gui des +aa a +ðŁĺ Ī +wi ves +mani fe +er ly +sm y +sh ima +x ing +pix el +jac ob +ac commod +to y +on o +po o +ti er +an swe +ðŁĴ ģ +ro sa +le ase +bel ongs +th ar +eventu ally +nei ther +go a +ski ing +at ra +ag h +broad casting +f ury +py ram +d ice +volk swag +wom ens +provi der +bom bs +miss ile +whi p +d ick +nor we +back up +el der +mat ure +concer ts +gi ous +sque e +good morning +bra ves +^ _ +au ssie +lun a +mal es +he ck +for tn +rome o +steel ers +p n +pe er +re presents + « +kat 
y +migu el +requ ire +cha ins +l ur +immedi ate +ti mber +âĸ¶ ï¸ı +advoc acy +ex port +an z +tiff any +auth or +ðŁİ Ī +du des +chil ly +hi d +har m +bu g +mon ster +terri er +tu c +story telling +ta k +in ti +immigr ants +b is +reach es +com passion +john ny +contribu tions +ðŁIJ ¶ +mechan ical +impre ssion +ran ks +ko be +men ting +bloss om +pab lo +buil der +bom bing +tw el +sul livan +om o +pe te +de mi +ku dos +w bb +t gif +mass ach +neighb or +che fs +eng ines +pun e +ga ined +phan tom +s days +ext end +gr an +cent ers +jac qu +dat asci +sleep y +el vis +answe red +s lot +con y +flexi ble +ti ally +le tics +% , +andre ws +si ble +mom ma +vin o +do x +invit ational +twil ight +j ade +ill ery +joh ns +f ou +p v +-- -> +break down +billi on +prin ter +mon d +c bc +mag gie +legi on +du b +kur t +po or +paren ting +regi ons +bikin i +be ware +si onal +au burn +kid ding +amp les +sp an +con tempor +c ic +ha bits +ak o +pre fe +bud dies +it z +em ily +person nel +moun tain +ver sus +ðŁĺ ¬ +ear ning +s ink +dar i +u u +s win +i ster +bru tal +n ac +kat a +clo th +am and +ðŁĶ Ĺ +ne o +alu min +week ends +nebra ska +co des +delay ed +brun o +pro ven +in c +i ght +fl an +or o +lam bert +regu lat +w f +massach use +kardashi an +bern ard +fi esta +volcan o +grand pa +anc a +d re +st itu +mean ing +fo am +au ck +at ed +r l +hot el +pers ons +dy nasty +ell or +ma i +am ne +sty ling +avi er +e g +vege tarian +, âĢ¦ +foun ders +sta in +g d +cy cles +sky line +trac tor +exi sts +tra l +kid ney +mar il +inst ag +se tte +addic t +tri angle +flash back +controversi al +z on +p ins +i as +tr ay +town ship +deleg ates +sp am +h ms +cr ane +peop les +o lo +fac tion +but es +on ica +deleg ation +new profile +eli er +mc a +w and +g ely +losange les +ber ke +ti ve +dis rup +zz a +cas a +jor dan +ford shire +ga thered +ic hi +atten dees +à¸Ń ภ+pe ppers +co in +bour bon +ern ity +ro tary +behavi our +jere my +team work +compli ance +tre mend +ðŁĩ § +bu hari +cam bo +bu yers +ha gen +bu ds +bay ern +mon te +sm ells +an za +ath lon +descri bed +work force +gi ving +ap i +invest ments +da il +sel ena +datab ase +th um +mor tal +stu dent +bu yer +do ver +gar ten +att le +loy alty +gen oci +holo cau +theat ers +ru ling +ven us +pat ent +ch un +ab by +awa ke +mass acre +bang alore +break ing +simm ons +ju sti +hal e +ed chat +gg les +haw k +mar king +head lines +stro m +co ve +breath taking +med als +hair cut +christ ine +tele graph +gujar at +ju ra +can e +sho re +propag anda +mu eller +.... .... 
+sa vi +stom ach +thro ws +ta b +war m +j ong +reno wned +hi r +ra is +mush rooms +guaran teed +bo a +m j +revolu tionary +certi fication +bru ins +jo in +w es +pas sport +c g +sex u +cap able +w v +ton es +jac kets +ac compan +spin ach +fore ver +bla ir +wat ts +g l +cou ples +prairi e +newprofile pic +logi stics +massachuse tts +jagu ar +o id +we al +under water +mo z +y i +ma ths +myan mar +pre ps +suffe red +tr ace +wal i +ah hh +bor g +st itch +cu lin +real ise +infe ction +discrimin ation +sh ame +an kle +hu mid +y t +brac ket +tru ck +tri u +ea ster +commun ity +post card +invol ving +ty ler +car amel +over view +ex amples +integr ity +base ment +instru ments +ani um +at us +gh er +laun dry +achi eve +gen eva +pr icing +hyder abad +beli ef +me ta +j aw +accoun ting +lead er +cristi ano +cou ture +cy p +vis ed +, ,, +k nu +h ick +break er +br am +ra b +mo or +ham as +gradu ating +pupp ies +ak h +ta h +ach es +ri e +op ini +g ta +re ign +tra gic +re ver +p ill +pine apple +tou ches +da re +le ys +il o +inter iors +sc outs +bar t +en zie +don o +bro ck +christi ans +ense mble + · +cine mas +new port +air line +win ston +le igh +cont ents +pre scri +ur ge +tr out +fic ally +il ia +sub si +are r +âļ¾ ï¸ı +w ounded +ðŁĻ Ĥ +pe pper +ðŁĴ ŀ +fit ted +af f +re sur +thursday thoughts +z ero +archae ology +di v +je e +i on +awa iting +co zy +beauti es +bal d +dat a +gri zz +stal k +kin ds +cle ared +jess ic +regu lar +ali ens +plac e +bo s +bi zar +thisi s +ðŁĴ Ģ +totten ham +ma fia +s lam +ari ana +car roll +back pack +care y +uni v +r g +pe p +dig it +tatt oos +ag on +volunte ering +diffe ren +consu mption +ka thr +head phones +t shirt +o b +ele ment +re tail +sh ru +al gori +contain er +consci ous +fi l +com ing +ra sh +u rope +def ine +gi or +femini st +flow ing +rout es +gl aci +fer t +somer set +ant es +twee ps +$ $ +h our +endange red +year sof +ro h +po pped +bac king +ba sil +bra ke +mon aco +lgbt q +pra gue +ut ility +cas si +gate way +haun ted +sch ul +ðŁİ µ +shou ld +walking dead +comple ting +dann y +montgom ery +pengu in +ss i +mer chandi +ðŁij ij +chur ch +h ates +cap tain +brea thing +ce t +fair ly +approach es +compan ion +surpri sing +kany e +pe y +hin di +targe ted +lor ds +de ut +di gging +ger man +ru t +ener gy +close st +y un +apo logi +ภ± +s ack +ru p +dd y +port al +d ough +b ats +ðŁĵ ° +at ur +graph er +pi res +mo tors +ðŁĮ ¹ +j c +dan g +tu k +clu e +us c +pag e +d less +bro ws +ju s +ad ing +re marks +oo m +car dio +ste fan +arm strong +âĢ¢ âĢ¢ +ni est +belgi an +bi op +so y +lo f +í ĥ +q t +flashback friday +ce e +ģ ภ+wre ck +mar ines +amend ment +wardro be +vo y +bur ned +guit ars +ra inf +li fel +ssi l +oun ce +exter nal +c key +me sh +she ikh +inv itation +sugge sti +pop corn +phenomen al +an onymous +tun a +chic ago +o val +del y +loc als +( & +pro f +no vel +fin der +spar ks +la ven +in fu +nic ks +qu ant +ra e +exe c +dist ingui +st ances +mu tual +sh al +unve ils +edmon ton +zan ia +a dio +vie wer +brad ford +audit orium +qu is +re act +htt p +l ero +chee ky +impac ts +ta k +ed t +desper ate +t ay +ì Ħ +sett le +bar gain +resu me +un ite +thro wn +ke st +se ys +mar ching +am it +decl ine +sch ar +me tr +stan ford +lin ke +ber ra +dol ls +rug by +jam i +b or +road trip +dino saur +mi k +sun der +re m +b k +over seas +nau ghty +imple mentation +iam srk +lun cheon +fir ing +mi ami +pere z +the e +z on +gi fted +con version +ceram ic +¡ ï¸ı +pe dro +ì Ĩ +v ick +! 
@ +he ed +si d +b w +docu ment +pl un +gr ants +fant asy +predic tions +vali d +car ved +gradu ated +ðŁijį ðŁı» +nation ally +ch y +af l +re sso +blan k +ri vals +j ig +e ties +om ics +une mp +b ound +sk o +inspec tion +par al +high s +cri sp +b ans +ob a +[ @ +co spla +costu mes +rec all +mou th +ni gel +b ts +ter a +ko v +do cs +west minster +dic t +gra vity +kar i +ro gue +t ted +war k +ida ho +w end +aw i +queen sland +proce sses +cli ffe +m ick +com pens +op ol +the y +cl ari +wiki pedia +salman khan +haz ard +pre ston +swee test +pd f +che es +tr ilo +south africa +bur nt +( $ +con tain +t p +sub mitted +sound cloud +at u +re z +word press +corru pt +n f +ma ker +í ķ +par as +adv ent +ri al +ca fe +fo ssil +!!!! !!! +co ws +c j +sp ur +institu tions +land mark +ent it +re ut +h is +alz heim +we mb +regg ae +mo squ +st at +identi fied +deal er +re am +re land +ten sion +ðŁĩ © +wra pping +deep er +fr at +red dit +ar is +moroc co +.. " +b low +ma pping +pri orities +ing a +swa p +re wards +conspir acy +creati ve +c j +congre ssional +vau lt +ple x +sophom ore +shad ow +ele ss +ðŁĺ ħ +dar ts +aldu b +anno ying +pro ps +n as +alumin um +h bo +offen se +j ill +oni ons +la ur +ta e +har dest +sh ro +ga ining +meas ure +ed tech +cyp rus +tar a +ang eli +car lo +go on +all i +im plic +ju pit +resil ience +ha il +bal anced +) ... +joy ce +gr a +th eli +defin ed +shi pped +main ly +min a +l m +sac ri +o ber +p im +claim ing +ent ers +co rey +bo k +cri ed +cool ing +dani elle +pharmac y +thor ough +ca ke +k lo +outre ach +z ens +digital marketing +val ent +sn p +her b +mr w +caf é +cap tures +no tre +triu mph +pan cakes +cu mber +spi ke +d ation +bi gg +sp er +crit ical +am al +too th +foun ding +a stro +' # +quan tum +th ames +un c +pri de +air bus +kno cked +un defeated +mediterran ean +cal cu +clo wn +sens or +ham mer +for give +cu shi +ber ry +maje stic +elec t +polit an +g ta +k ari +bur ke +sea hawks +volkswag en +re i +landsc apes +cas u +grand father +list ened +/ / +star trek +rainf all +fur ry +vi er +star k +rif le +ff a +leg es +hillary clinton +min us +correc tly +architec tural +pre ce +up side +box er +ðŁĻĮ ðŁı¼ +is ai +de t +pro vo +tis sue +spoo ky +ve led +re con +prospec ts +que bec +âļ « +ig no +anat omy +shap es +w p +p interest +hor e +an es +pick up +ti p +pra desh +hu gh +co e +po k +gram my +well ington +sti gate +ri gh +lea p +king ston +scen ic +go sh +v ani +au g +s ary +zi er +bure au +lin son +con te +fra gr +all an +g aw +lan a +colli sion +surve ill +ren ais +ar range +s ali +do in +br ance +bren dan +our se +in coming +suspen sion +à ´ +l la +educ ators +in tri +da e +bio graphy +bul gar +villa in +go thic +rw anda +e w +may or +meet up +democr at +mor gan +su dden +te sco +car rot +bom ber +mck in +re ne +fun day +agricul tural +haha h +show time +form ing +col a +scor pi +quo te +po ppy +s life +d az +tu b +ne n +mo t +ðŁĺ » +s ore +elder ly +o ve +skin ny +um i +anc o +man ship +we re +g v +k ah +fol ding +ne at +samanth a +dan ish +uk rain +humid ity +nu tri +jak arta +cand les +oooo oooo +at ile +streng th +i bra +bap ti +charle ston +fr ames +girl s +clear ing +glu ten +# # +super natural +ju bi +ph one +he in +dr un +le ak +invest or +y er +dom ain +ball room +mi sh +app li +off shore +bla ze +dor o +âĺķ ï¸ı +win ery +shar if +ad ore +n ir +saf er +si gh +as cri +strong ly +trac y +ck er +ol l +faith ful +ey ed +deli ghtful +vis m +karnat aka +tit an +wh ar +jer seys +re fur +heav en +gri p +pan ama +pre li +glu ten +o dd +cont ent +pon ti +tion ing +e 
commerce +feder ation +flaw less +ge ar +ti res +by r +pol ice +cu ban +tri butes +tic ul +chur ches +nur sery +di aries +muse ums +snapp ed +i van +wi ght +touri sts +ramad an +t rent +prophe t +won dered +focu sing +hi d +ic ons +i q +ambul ance +pi st +fun niest +time less +sr ilan +bu ys +ki ds +colour ful +a shi +ch ir +mu m +ðŁĵ ļ +let ter +x en +reut ers +pre serve +in ting +ste p +fu ji +uni ver +i u +show down +po ems +surveill ance +suspec ted +ta e +sol ving +tom b +mother sday +car pen +recru it +pil ots +bro c +mix ing +fri days +ty r +represent atives +tra pped +abdu l +free style +clu ster +âļ łï¸ı +k d +sk ill +pit t +ex o +commer ci +muse um +loc ally +g ina +no bel +immun e +fr ac +cap su +main ed +attemp ts +bull dog +be spoke +sing ers +sp elling +seg ment +nat ures +tic k +lip stick +clean er +gett able +preci sion +âĢ¼ ï¸ı +th ood +re ef +no pe +bill y +di gi +mu si +ri val +figu red +tal ity +sun ny +ber k +aw ww +awa its +un real +co pen +asy lum +ex otic +bu en +mo ck +en able +arch y +fr a +pla stic +al mond +amp li +displa ys +abbo tt +s me +x p +ðŁĻ ĥ +graph ic +i ved +mar a +cau tion +lea ks +en berg +ul u +unic orn +cann on +appren tic +ðŁĺĺ ðŁĺĺ +b ball +wil low +at ics +am as +manufac turer +campaig ns +port ers +flo ors +l su +ty pe +ke j +honor ary +it im +to le +min ecraft +d x +ma sh +ri o +consequ ences +ron ald +go ssi +suffol k +mu se +r bi +live music +i van +ðŁİ ¤ +le u +patri ot +man it +lan ca +home decor +de ar +sig ma +ti de +str ings +v ita +sequ el +try na +inve stigate +bor is +ve gan +barri er +mind fulness +web b +hu stle +in da +tan zania +str ay +tex as +c ag +diagno sis +wom an +g w +ob session +l ative +nu fc +fl ynn +moment um +sof a +wal d +vege table +tu cker +supp er +se ab +ar ro +se ag +ven ting +counc ill +sp lat +cal cul +.. # +com fy +odi sha +sto pp +war fare +ca es +à ¨ +co y +price less +in sec +ðŁĺ Ľ +contro ls +empower ment +datasci ence +per pe +gen ic +e res +tru deau +man o +sla very +expand ing +ma he +fa iling +s aga +photograph s +cre st +re on +surf ing +hi e +ðŁį Ģ +ja e +fel lows +south ampton +sol om +ce ster +tab ility +hor n +se ct +he e +cole man +at las +explo rer +consul tation +copy right +organi zing +den ied +mon keys +noo dles +br is +fl or +dou gh +bon ds +sho cked +eco system +care fully +w m +apart ments +cur ve +san diego +must ard +comm en +cere mon +e ch +ru th +ðŁĻĮ ðŁı» +hawa i +fil med +te ar +as ingly +ca ir +wat t +instru ment +ou tta +ye ol +river side +ë ° +. 
: +nor wich +alo g +migr ants +new man +ri de +spr ink +targe ting +beli eve +tor ch +reflec ts +per mission +ff man +ene mies +bas ics +se ized +sun days +le i +hass an +en do +h c +st ad +le ments +kk kk +nan o +shar k +man a +on ic +treat ments +ear ly +collabor ative +shu ttle +bran ches +mis ses +mained cm +ap ers +ky le +carri e +leis ure +sh et +bir ding +adv ances +ðŁĵ Ŀ +popu lar +di ane +a be +re war +neigh bour +k pop +remem brance +play ground +ru b +krish na +e bola +inqu iry +ep a +lu min +organ isation +abra ham +norm ally +pre ten +jan et +w t +ðŁĴ İ +encoura ging +a stic +bu mp +syd ney +s z +ss ss +gar rett +ðŁĵ » +consul ting +roman ia +spo tting +chanc ellor +ar ma +presti gious +ðĿ IJ +t ad +cry st +compe tit +rati o +cat aly +bro w +j ur +vi king +commu te +y day +la yers +du mb +esc al +genoci de +f ill +gu pta +ste pping +se i +fo to +wild cats +col i +projec t +ear nings +st r +ge ons +comple tion +b m +decor ated +craw ford +af ghan +sc are +visi bility +hi b +direc tion +stro ll +christ ina +alter nate +cl are +sty list +be hold +s ance +leop ard +acqui red +narr ative +ash i +the a +?? ?? +pe as +at ch +sli des +le en +renew able +eng lish +qu ir +co aster +r x +fo ols +match day +mis m +amaz ing +z ig +ke ting +won t +to wel +di ab +sta ke +n m +mel t +e than +gra pe +polit ician +sm en +í ĺ +re o +wedd ings +cat cher +or acle +me mo +ðŁĮ ´ +ec k +rob bie +norwe gian +oper ator +am or +se wing +ju l +x ie +u v +fif ty +me ga +tatt oo +liber als +u pri +traffic king +richard son +su v +ki p +mess y +tremend ous +gl ou +cour tney +la d +stere o +my ers +i dio +^_ ^ +man ning +dy e +w d +thr one +jun k +as u +provin cial +k ook +wr c +fine art +hamp shire +renais sance +b red +fall out +s j +sn l +al am +tor ture +fy i +sh ines +pa w +ch ar +hen ry +c row +aci ous +di an +pa ige +ba re +stock holm +scen ery +ðŁĩ · +jef frey +pu sh +decor ation +ne d +cu te +brig ade +laven der +inv ites +e sports +vo ir +dri ed +tran spl +sur geon +no vels +pul ls +son y +lun ar +man e +i vy +fru str +dor set +sa i +tor res +ssi on +shut down +suggesti ons +writ ing +e o +battle field +u ga +ðŁIJ ¾ +vac u +spl ac +g it +u g +high land +% ) +mer maid +sacram ento +ta ils +p w +ka h +t ell +enh anced +ì ķ +auck land +cru el +ðŁ¤ © +au dre +sail or +gram mar +g love +de on +infl am +fresh ly +k ell +zi p +christi e +mil d +di xon +instru ctor +g ence +ãħ ł +sub jec +constitu tional +crow ds +in visible +ru ins +da k +si p +pla que +p ouring +comple x +z ine +ste ad +f let +trans mission +lo way +ar un +incre asingly +au d +transp aren +cro wned +sc oun +blizz ard +lux u +fi ers +achieve ments +hun ters +rock ed +bas in +vio let +pro ves +achiev ing +pro sper +se ga +flo at +vi an +xi v +pol ic +tur a +approxim ately +wander lust +keep ers +geta way +co d +pol is +br yan +col ts +tal ents +yo gur +gluten free +wri st +gr y +cze ch +ðŁİ Ī +ev ille +ðŁı Ī +to x +dani els +am er +bi ds +weare one +me tab +g t +boy z +pd x +pos session +pu shed +shr ine +reali stic +tri gger +na vi +ru mors +n af +jen kins +tr un +comm uni +Ã Ĺ +gam ers +arm or +moham med +bal cony +y ah +stron gest +rhy thm +unfor gettable +k p +ho bb +custo dy +greg or +r ita +aes thetic +il ation +sponsor ing +n ay +kid napp +sh s +ra jas +me g +signific antly +butt ons +la c +ver sions +essenti als +opini ons +k ro +d printing +wi dely +d k +ur an +y al +reque sted +c n +cur ric +plu m +gr un +v m +dev on +m yo +rel ation +juvent us +rou ge +min ority +min es +jupit er +n ine +oxy gen +fran kie +une sco +fab ric 
+disgu sting +sal man +dete ction +lan ka +d ac +ðŁĩ« ðŁĩ· +argu ment +shel ves +cel tics +rober to +pi gs +he dge +fau l +pow ering +butter flies +fi r +re make +att i +com o +emp ha +kend all +poke mon +se ating +d ans +bald win +ðŁij » +lesli e +one direction +ti mber +im an +fon t +e der +di on +ste ph +for mat +gre gory +pro p +he x +ru in +sor y +inf er +n aw +bar ak +sd gs +kar ao +lu sh +v ander +end ent +g is +a fro +soc cer +ay an +t uni +lun g +da yof +alex a +mar ath +addic ted +ag ile +hy gi +light weight +ì § +mand ela +jo ey +anc y +hu m +bi r +memor ial +jim in +ging er +v ak +jav ascri +cro ps +orig ins +d ari +pi per +im port +aggre ssive +predic tion +re pairs +cr acker +voy age +ni ke +mu mmy +linke din +country side +bor der +gla ss +per t +s als +sho e +autograph ed +wal nut +colle gi +sal ary +pa iring +ðŁĮ ¸ +cath ol +swee the +defe ats +streng then +roof top +impro vements +barri ers +ur u +t ally +ru led +ðŁĨ ļ +nai ja +emo ji +per cent +gi o +pro bs +on ce +adm its +pa ths +li ar +day tona +pe ters +cal i +cal li +mu g +o sa +ap h +ab y +hy de +eth nic +pla ins +ol f +haha hahaha +holi c +?! ?! +su bli +bl acks +mo t +gh ton +lo vin +b rent +bar u +l ati +de w +ate au +q a +pain ful +bu sters +st atic +ðŁĩ¨ðŁĩ ¦ +note book +out fits +si es +r f +floo ds +Ñ Ģ +thro at +su ici +ro vers +beng al +pre pares +blo g +mini ature +Ø ¨ +am phi +com b +r sp +in timate +green e +Ì ĩ +al tar +surg ical +ves sel +... ? +gav in +g ator +threat ened +z ar +rob bery +di er +promo ted +y g +x s +su bs +inter viewing +threat ening +do zen +me ado +water fall +nintendo switch +cal um +mini sters +dro p +univers ities +war ned +tac tics +ðŁĩ ² +refu se +ad ju +v ast +ðŁĺ ´ +mc fc +lib ya +no filter +distribu ted +re ser +ron nie +de co +javascri pt +mon k +intere sts +fle x +mar tha +sti es +oo d +ðŁ¤£ ðŁ¤£ +e un +b ali +g omez +sti mul +moder ate +d ity +ir is +stra w +consist ent +direc tions +adop t +sal sa +cro o +reco vered +black friday +lan caster +accep t +weareone exo +buil ds +free man +air plane +diti on +bel ong +jam ie +pit ching +li f +om in +cri spy +pre pping +ve g +chan g +accompli shed +graci as +dolph in +elec tor +culin ary +super bowl +wal a +pur suit +black berry +be an +cardin al +pro ved +immigr ant +stric tly +holocau st +pass age +ha us +cou p +pur se +har ass +< < +le ed +ado be +st ad +legis lat +par ked +pri yan +sil va +kri st +s the +fun ky +ig a +sett lement +ph s +t mrw +stre ssed +hun t +ho ckey +treas ures +cham bers +ol u +hu t +mar ley +tex ture +wilder ness +mm ing +poten tially +om aha +ju dy +to es +spo iler +distingui shed +feli x +ah u +recommend ations +zom bies +hit ler +tri ple +colla pse +motiv ated +ulti mat +gg ling +so y +ci gar +fo ren +vine yard +gl itter +fin dings +colon ial +hun ter +eri k +den s +beet le +lot te +sub tle +s matter +tru sted +experim ental +nam ents +ðŁĺ Ĩ +regi on +acquis ition +bre eding +quarter back +am reading +oo td +ru de +initi atives +st out +hy ung +out come +al fred +mic s +exper tise +bacter ia +pengu ins +jump er +valen cia +bar k +ing day +sell ers +contrac ts +hou ston +commissi oned +adap tation +swan sea +santi ago +common wealth +ju dging +sub mission +sco rer +tom my +ñ o +ex quis +fil ing +explan ation +alli son +wemb ley +ri dge +chev y +san tos +own ership +cogn itive +favour ites +sh ed +phil anthro +dele ted +go dd +s nor +gui delines +ff ing +je ep +cli ps +sw amp +an or +guil d +bol ton +spring field +munici pal +goal keeper +ye on +ðŁĺįðŁĺį ðŁĺįðŁĺį +ãħĭ ãħĭ +water front +gra ve 
+contempor ary +ar ity +ÃŃ a +sle eps +sy rup +al am +pi re +co yo +moto gp +ty son +kej ri +cir cul +sing ly +cr unch +complic ated +nostal gia +k op +mo ve +k ale +mac ro +mid west +h ans +tri bal +nu de +௠į +bey once +congratul ate +cat er +leagu e +ðŁĻ Ĭ +la dder +cra shed +tech nic +karao ke +harass ment +ro ts +experi encing +kri sten +ðŁĩ ³ +ðŁ¤ Ĺ +reflec tions +guin ness +illustr ator +ðŁĻı ðŁı» +cen ter +nar row +comm ons +regul ations +Ù Ĩ +har m +cro ft +cu ssion +hong kong +st ical +intern ship +zo e +cho p +hoo ds +estim ated +batter ies +berke ley +smooth ie +shau n +cro s +~ ~ +cam pe +hu mp +b g +proto type +cl ick +shaw n +re viewed +tem pl +p f +jed i +blo gs +ray mond +as th +ba h +av ail +scot ch +leaf s +nik ki +to k +hol low +ur ges +of t +un like +lat in +u e +cat ering +mil i +alter nati +ma ver +Ð ¸ +ag le +pre order +lu x +cu cu +ðŁijı ðŁijı +t art +âĿ¤âĿ¤ âĿ¤ +arab ic +rapi dly +ar rang +all en +travel tuesday +pa ws +flo ws +st ability +flu id +ca pp +can berra +uu uu +sp ani +demon stration +m la +plac ement +m w +presi dents +awe som +bever ly +ani st +ne al +father sday +referen dum +la hore +o aks +deb bie +half way +gho sts +de bor +matthe ws +fi at +t fw +pre sen +rob i +de d +bro ck +laugh ed +am ounts +bam boo +kinder garten +eat en +mtv hottest +break out +u sic +fra ser +legis lative +p ang +modu le +sam my +go ver +ear ns +expe dition +gar h +concep ts +char lie +la va +bachel or +veg gies +deter mine +el lie +un locked +fru it +dal la +cou pe +wash ington +depo sit +iv ory +pau la +chic ag +gu cci +ðŁİ ĥ +cul tiv +pier ce +li fted +stu mb +re cover +musc les +conduc ting +cb s +mcla ren +sophi a +cel lu +oce ans +up loaded +game play +mal dives +kim ber +avo i +rac er +ca ine +cav s +h ana +li ga +ra ven +inter vention +inaugur ation +oo h +at traction +merchandi se +tune in +li king +juni ors +int ended +att acking +aqu arium +i wd +comp onents +sur ing +cent u +yogur t +ðŁı ĥ +show room +op tical +ty our +ju dge +yi eld +an to +pl c +transparen cy +recy cled +chi ef +ar om +ambassad ors +plan et +âĿĦ ï¸ı +om ed +vaness a +cour t +mar gar +hal ey +v r +reg ina +pd ates +hi span +live stream +âģ £ +ya hoo +gal la +secu red +w ir +bene ath +off l +n il +am b +ye g +out let +u te +pe ep +lind say +bent ley +... ! 
+he el +trilo gy +vo s +ty re +there fore +tor onto +ab i +simp li +ja e +exten sive +eleph ants +s or +orient ation +im peach +re play +constru cted +peter son +pa is +por ted +custom s +colla p +ad u +high lands +sal em +shel by +ko vic +stra in +ro sie +sen ators +snap s +bo bb +suz uki +bla des +k p +lo lo +gener ate +si ght +ma e +struc tural +predic t +jump ed +ah mad +sun g +just ice +gla m +vol vo +jubi lee +de tention +lo sses +pu ri +every time +Ð ° +ra o +ed ge +li mer +rese mb +har old +re tri +sacri fic +surpri ses +am c +srilan ka +bar bie +men s +fin n +ag s +ukrain ian +em brac +î IJ +flav ors +hom er +lau re +ou th +pr iced +ver de +fir m +ah s +cu b +tre y +par anor +pro fit +in dv +who a +har sh +al ot +crit ics +hu bby +fi gur +gi ra +ca stro +chan el +in put +origin als +ten ant +yy yy +ture rs +lincol n +co on +lear n +ch ou +ac are +o les +din er +hy p +bizar re +mc r +let sgo +decor ating +ðŁĮ İ +al ison +ar vin +f d +reha b +mccar thy +lot tery +da h +minne apolis +eli gible +diagno sed +emer ald +destin ations +s ans +or y +bla zers +n v +ba il +digital art +no c +mal ta +sol ar +pi pes +alleg ations +no ck +po pe +bri d +premi er +n x +present ations +ef a +bo ws +val ve +opp onent +Į ë +visu al +ing le +cate gor +e ter +po is +dan i +at tract +neu tral +th ene +cra shes +fred die +ut ili +c st +awak ening +slo ven +quali fy +pro of +fair y +le v +fre ight +enjo ys +cup cake +flav our +â ķ +protec tive +ðŁijı ðŁı» +is u +ad mir +h mmm +continu ous +ai res +rap tors +showcas ing +y uk +pa ste +follow er +instru ctions +sp ru +@ __ +the o +debu ts +ve tte +sto w +es of +ach ed +sul tan +sand wich +som alia +franc o +car ne +flu ffy +al pine +jas mine +he ated +viol in +ple ss +divor ce +per former +phi es +port sm +dar a +kir by +lo p +chill i +for th +sky pe +ðŁĩ®ðŁĩ ¹ +celebr ities +ed y +ve e +po ison +ey el +gra bs +ssi c +un o +wester n +rail road +am er +numer ous +s v +fo w +fi st +âĢ ĭ +reque sts +mar tial +em my +accept ance +lau ra +ภ´ +er up +hyun dai +out lander +u tt +wrest le +esp resso +demand ing +g dp +geo graphy +sas kat +tro ll +confe der +su es +se m +be ts +t ful +to sh +teach es +col oured +gal way +mac y +dis orders +bb cra +at em +fen der +lit ter +e sh +provi ders +renov ation +nomin ate +ps g +nomin ations +jen na +shar p +some day +z ur +bra ins +che shire +pre y +hu go + ¿ +to ken +r v +car r +tac tical +zel da +kay la +fern ando +photograph ers +j our +umb rella +woo dy +congress man +du mp +le vy +ju an +d azz +sign als +la in +an u +mic hel +por ch +al den +sibl ings +y ale +pe el +sw ick +gg in +ll c +k ale +s con +il d +pat reon +re el +qu in +wit t +mar ty +moo dy +ton i +der y +g ators +speci fically +dd in +ly on +tr ick +meado ws +p j +bor gh +vi k +tu r +bron x +pu ff +lan tern +ðŁ¤ ¦ +g ently +be stie +fac t +refu sed +fas ci +mp y +ðŁĶ µ +cross over +mead ow +indian apolis +duc ation +sle y +loo m +mix er +new music +film maker +prosper ity +li m +week end +cre amy +neu tr +lu ther +h v +nor thern +tw o +h ra +cat ches +appear ances +ha bit +kitt ens +n v +illa c +inf an +regar dless +liz ard +dun k +cur tain +ac om +in tu +ve z +e min +fl ats +calend ars +em power +ru ined +hun gary +vi d +we x +u lum +aber deen +o sa +k t +ma ssi +se emed +s den +' ? 
+tele phone +de fi +insp ires +me ow +z ones +bl ind +pl y +tuc son +advent ure +ge d +oy ster +ðŁijıðŁijı ðŁijı +out put +tt t +metal lic +sma sh +ucl a +sco ts +perfe ct +lu cy +regular ly +sp ic +rel ative +ath ers +mis e +batt ling +deci des +mat a +occu pied +random ly +cat softwitter +gi an +ball y +al ties +al lies +im men +sy rac +ðŁĴľ ðŁĴľ +l lan +au r +k ut +lam ar +affe cts +n ra +star war +ðŁ¤ ĺ +sc ram +en chan +pro cess +luxu rious +ar ray +sher lock +comp ati +dor f +stre ss +m su +s with +sal a +sof instagram +fo il +under stood +qu ay +r p +c ade +ja w +en ab +en coun +ðŁİī : +do ck +satur n +mu ll +lay out +ra rely +happ ily +fix ture +or ph +over looking +her bs +m itt +pil lar +nol an +pe tty +str y +u i +mu k +o res +o vers +á µ +re creation +we sley +ri t +kejri wal +sto cking +g v +subscri bers +moo se +ma e +ber t +opp re +assign ment +u ro +high lighting +cal vin +we igh +cambo dia +av on +ke m +dis abilities +read y +char gers +p ads +iz ing +illi an +tru ste +col leges +associ ates +alban y +mil ton +cr on +bu r +har dly +si ghts +anti ques +e cho +surpri singly +ha iti +cap t +ph p +op io +ine quality +equ al +ken y +sch mid +autograph s +ren t +qu er +cit rus +challeng ed +te c +epi de +fe st +z hou +li me +citizen ship +cry stal +convin ced +mess enger +copen hagen +âĿĹ ï¸ı +war ran +develop ments +ï¸ı âĥ£ +fore x +hi ro +sne akers +xi de +vi va +stere o +bat ting +ss el +ho st +beng al +critic ism +q c +cr un +attemp ted +ry e +determin ation +cre ations +d read +label s +pos se +anc er +joh an +si ster +partner ships +les bian +k st +guaran tee +bar o +fix ing +ma son +m ous +chem icals +t less +bio diversity +par o +bhar at +ac ol +refu ge +en te +t iti +dys sey +respon ds +lef to +in er +se vel +rahu l +ol ine +frank fur +cho reo +enjoy able +c to +strugg les +wood land +heavy weight +gen s +rece p +ac cred +ðŁĺ ¡ +trans formed +list en +at op +n k +sur ge +be re +gover nor +prison ers +clau de +t ill +mu lator +emo tion +water loo +star t +ðŁĩ º +clean ed +grand mother +fear less +afric an +astron omy +ðŁı ģ +ภĻ +the world +su itable +anth ony +k and +tt en +meaning ful +disc lo +jaco bs +à ¸ +tom linson +ghe tti +ty pho +sub stan +as co +te k +nag ar +mu d +am on +vacc ine +f ty +fle sh +no el +infl ation +portu gue +glam our +tra m +v re +te qu +roun dup +w yn +rejec ted +mosa ic +si ghting +cal f +o ta +com position +go pro +gonz ale +e ed +b ard +tu e +effec tively +we en +al to +ri bs +rel ate +thir sty +fu rious +di m +ch ard +perfu me +s ny +chur chill +k of +master class +wa ve +ðŁĶ µ +er in +own s +to be +sk illed +te m +go f +en i +tor i +cra zy +l ick +resi stant +ici al +ag ar +! 
: +g ali +del aware +bl itz +koh li +pu ck +avail ability +hi malay +influ ential +cro chet +victor i +read ing +ho bby +vie t +j as +en gra +sk ul +ðŁĩ² ðŁĩ +educ ate +tech no +distric ts +blu es +se tt +seven th +lear ns +ee ee +apocaly pse +hang out +cru el +mu tu +bru h +hel en +she er +c tion +kle in +tex ans +ce real +sh ine +ne red +gra s +am bro +f ella +hin du +matthe w +li ma +mir anda +je wel +so ho +euro vision +neighb ours +chand ler +be sides +ðŁ¥ ° +ast ros +thu mbs +ren ault +ra ve +hi red +ðŁĸ ¤ +it ary +z or +bla zer +k ine +ea u +kat y +dc comics +pe c +ro dgers +water proof +kill ers +super int +pre serv +as so +brew ers +promo tional +sc am +villa ges +sket ches +ju icy +for life +au dit +so lo +fundam ental +len e +philipp ine +t end +conserv atives +sponsor ship +dd le +a ine +h tc +os i +hul k +w af +ภĻ +evalu ation +ant ine +sle e +robert son +roo sevel +ag i +sophi stic +emplo yers +bubb les +ko wski +inter action +sh u +bou le +ic an +j are +han k +leg itim +k nicks +kar ma +recei ver +per ks +u h +sta ir +sun i +labor atory +gra ves +voc als +oo t +c ture +thri ve +tic o +ãĥ ³ +b w +carto ons +mcdon alds +dra w +y ung +pl er +li d +eth ical +groo ve +ent a +international womensday +pat ron +wor ries +ðŁİ ħ +ðŁij ĭ +ka therine +di az +tor i +bach chan +tru st +min eral +ic om +buil ders +bor n +col oring +lat te +ca se +revolu tion +tra der +ox id +chi pot +inst antly +sou thern +se hun +pro b +her nandez +lis bon +hu awe +p ong +me a +ro oney +wheel chair +ke en +be tt +cor in +regulat ory +di splac +ka ren +sch em +sun sets +wh ales +remin is +he p +hi de +mar cel +pand ora +do yle +th fc +ot to +no kia +trans gender +ko v +hawai ian +sha ve +so vere +exc er +nick i +pu g +st or +ro th +wee t +leg al +dig nity +po w +hom age +ðŁĩ³ ðŁĩ +s re +can on +la x +wo ah +quart z +ñ a +gree ting +flick r +nai robi +advoc ates +an c +vi i +eu gene +th ra +c re +el an +pen sion +th letics +ton i +re agan +x v +sto re +ben ch +har lem +todd ler +sent enced +âĻ¥ ï¸ı +glob ally +che aper +u f +ma m +nic o +ik u +tho u +ni st +dam i +th ala +rho des +sal e +bow ls +â Ī +las vegas +sanc tions +adm ire +mat ched +un able +travel er +ele ven +straw berries +âĢĶâĢĶ âĢĶâĢĶ +stu dio +jac ques +im s +valu ed +s no +cheese cake +n xt +e os +s x +f x +ton ic +hat ch +chic ks +gra ds +hand ic +r ory +as p +ri pped +denti st +n en +lu fc +âľ Ĭ +di ge +hop kins +sher man +f da +for all +ash ley +str and +h y +liqu or +buffe t +ess ence +phar ma +suri ya +ðŁĴĻ ðŁĴĻ +festi vals +z an +re fresh +pur ple +uni forms +kenne th += ) +as an +hel sin +transform ers +k ali +person alized +chal k +bo bby +â Į +the mes +depar ture +prin t +illustr ations +qui et +agre es +gri ff +Ø ³ +m iti +toge ther +conven ience +ab ar +car lo +turt les +info sec +some what +ar lington +scholar ships +emir ates +mu ms +st ella +auton om +fe ather +g ore +nom inees +fragr ance +Ñ Ĥ +w ong +thea stern +gr e +z illa +is i +bump er +go o +do zens +ab duc +âļª ï¸ı +o ils +don ors +sil icon +i pod +fortn ite +ðŁĴ ¨ +tor o +spark ling +consci ousness +pal a +nu m +moun ted +ffin s +thi eves +team mate +pra b +om er +ta pes +bo d +mit su +ste w +e re +p bs +tu sc +lo we +ra de +parliam entary +h m +ed gar +ðŁijĩ ðŁijĩ +to a +a gh +hon i +s late +ge ek +ap t +hard t +ta p +horiz on +grow th +make over +hi l +paper back +id an +reha bil +gi u +possi bilities +let tu +fran co +bo ss +ach er +does nt +mo e +ta ker +huss ain +ml k +di l +th ia +ham a +real ised +raven s +curric ulum +m ith +k night +ted x +r v +isai ah +cumb 
ria +birth days +f ing +pre z +mu barak +exquis ite +clear ance +y en +par i +ev o +à º +modi fied +app lying +imple ment +disco vering +chap man +indie game +dis k +crowd funding +mach in +li vel +sty led +âĿ Į +ma king +rehear sals +nutr iti +subscri ption +and ro +cre ators +car ries +ky lie +cam den +appren tice +tax pay +c ca +tuesday thoughts +pis sed +er man +dete c +freed om +mer i +.. ! +psal m +sun light +per spec +be ings +book store +rock star +fun ctions +p ence +fav es +z n +obam acare +sp ill +coven try +pi geon +pi vo +ba it +kol kata +av al +don or +wa h +privi leg +tra ditions +rajas than +ten ess +portugue se +yn es +tack les +de fic +tor n +pol ling +thor ne +in a +bened ict +bar ry +cal ories +ver dict +save the +nor ton +off ice +main stream +impro ves +fr on +respon ding +real tor +scotti sh +de clar +r l +shi v +supp lier +re sting +swee ts +qu i +. âĢ¦ +whit ney +startu p +thank you +teach er +h alls +ha ve +hand made +pro ving +quar tet +ro chester +li an +virtu al +mend es +of icial +mid lands +x box +meas uring +o vo +accommod ation +bri des +collegi ate +intellec tual +in car +ni ag +ðŁį · +sf w +coco a +co ats +civil ians +presi dency +mat rix +sweethe art +tri athlon +wag ner +ra dic +plann er +the o +execu tion +k um +the walkingdead +sc ar +ro tation +blo gging +bom b +re son +bb les +st are +assi sted +e do +brand ed +war nings +thor pe +acknow le +satis fied +sho res +ri d +dor a +phys ically +bi gh +appro ves +ha h +ric al +vers atile +pret end +lu m +ab hi +ye e +sp it +ãĢ Į +dj s +ash tra +j t +ven ues +gram mys +cy clo +tr acker +over watch +repl ica +el yn +nr l +lind sey +hom o +ballo ons +kitch en +si s +am os +ende av +ðŁĴ » +a rec +thu g +hoo ked +hr c +new york +bur gh +americ as +patric ia +ug u +ap athy +ha st +psy chi +cor k +petro l +ðŁİ ¬ +ak u +po pping +psycho logical +au x +g ma +cad illac +wa ste +auth ent +bri stol +nam e +que er +to ber +jer ry +com in +ch ant +privileg ed +op ar +lo ser +tex t +mar ker +stri es +equ ally +ak i +christ mas +gare th +ble w +em ma +imag in +se als +che at +conditi oning +j ana +ren s +dar ies +o asis +disc ounts +coun cil +i ka +shir ley +vou cher +al ps +w x +q r +dri ft +attemp ting +ut c +Ø ª +gonzale z +m f +jo ker +paralle l +pa re +aspe cts +proce du +n p +am a +rale igh +bright en +gu ire +radi ation +cre scent +ho b +il le +str and +v ore +n ard +che st +di wali +av atar +al der +d ling +pa thetic +ðŁĴ ĺ +spir it +jor ge +film making +ðŁĻı ðŁĻı +challeng er +b j +down town +ht ml +ade qu +twi sted +in ely +( ' +wra ps +oper ational +y ne +n us +mag net +market place +health ier +snap shot +dam on +inter ven +fe derer +ow ls +biscu its +j p +ro deo +blue berry +lec tion +fron tier +summ ers +re yes +pede strian +go l +caf fe +refur bi +bou lder +me ghan +speci alty +la ss +e i +suspec ts +appro x +rr r +ra th +st im +cru shed +he d +wh un +lo af +cr ore +river a +gene tics +so ck +wa sted +ny pd +answ ering +do ve +bel la +ol in +du n +fi ji +pre tty +spar kle +y un +j d +euro pa +li fts +am ber +mu r +te k +boy d +roy alty +in do +ri b +go tham +ti est +inst alling +ke mp +the photo +cos mic +) )) +whole sale +loy ment +eas y +su ing +sett led +af p +pro ver +suppor tive +re es +ne ath +deli ber +c é +wel come +pic oftheday +new born +pat ty +sun s +si est +fl int +diffe rently +spo ilers +troop er +g ins +cor y +look out +equi pped +ta pe +to by +resear cher +u sh +ke yes +al ma +induc tion +k w +k har +sl ick +bri de +e ur +cra ving +book ings +ch es +tr unk +vern on +sp her +cryst als +rel 
atively +pom pe +uni ons +val ley +par a +w ant +ok c +de af +ser gio +len non +sh ay +cr a +v at +he e +t we +liqu id +pol y +ðŁİ ģ +b ent +be aring +motor sport +bar be +te sti +han i +fin ancing +astron aut +water colour +ri sh +comic con +gar t +wr ong +ber n +it an +ste pped +fil ters +c low +me x +dem ons +all o +expand ed +comm and +et ers +go ats +si ri +y r +pot tery +mari on +i le +el an +san to +person a +du ke +hom eless +li ghted +wheel er +chang er +cab bage +sur real +ham burg +sma shed +str an +k not +i art +ob i +be dro +di al +th ick +b ingo +fu s +vacu um +con ve +ati ve +accur acy +accoun t +re fer +ri z +spider man +ban a +r ite +u b +ab s +medic al +lin k +si em +> >>> +be tra +g lowing +re actions +pupp et +spa ghetti +ang s +re medi +pray for +roy ce +char lotte +£ ï¸ı +gh et +affe cting +ro de +soci alist +mo ses +az i +o it +re porters +cd t +ap ing +s nat +minim al +wa ist +sie ge +>> >> +ri g +schmid t +h are +ec a +thor n +he mp +es the +cly de +th a +don ut +moham ed +ling erie +le gg +carpen ter +perform ers +de a +imag ined +cur se +la sh +ct r +agu a +ro ar +gr i +ro le +j fk +resur rec +roosevel t +maril yn +sm alle +will is +wa ited +char ities +the res +li k +origin al +car i +c ough +cru ci +la gun +contra st +k ou +arm our +re moving +t ent +maz da +bri ghter +thi ef +cor ner +tequ ila +buzz ing +al bi +p am +az ure +disc oun +pixel art +possi bility +ham ont +tra des +bu da +hi ve +vers y +fin ch +tran spa +em i +terri fying +in qui +g ba +sub stitu +collec ti +plac ing +cin dy +k ann +pa tho +diamon d +mour inho +guine a +anthro po +air s +pu mps +ì ļ +pas o +cur ling +an ita +resi dency +ne wh +jo on +cigare tte +que ue +ex trac +gam es +spl en +ex press +public ly +bon nie +tribun e +ba ek +reason able +c or +timo thy +she eran +Ä ± +f dn +su tton +concentr ation +carav an +x avier +al ger +cy lin +freder ick +ner ve +pe ak +lettu ce +j ail +pre game +kav an +up graded +eco logy +squad ron +gra pes +goo g +pa stry +ðŁĹ £ +ãĥ¼ ãĥ +mil ano +awa z +presen ter +ðŁĮ ¿ +her d +king s +tem plate +fl our +h v +k ley +i ya +spe c +at er +frankfur t +co ch +tex ting +del i +communi st +regi ment +ele anor +anticip ated +ðŁijĮ ðŁı» +thephoto hour +ran o +survi ving +simul ation +daw son +ar in +aqu a +m or +âĢ¦ . +cin o +ira qi +sh az +dun dee +we s +dra u +hann ah +s news +occup ation +ste en +x m +ang les +sett ings +gur u +kno x +or ca +shap ing +w ent +dr illing +zz ie +br i +kis sing +fin d +ma ine +âŃIJï¸ı âŃIJï¸ı +ðŁĮ į +lar ry +bu sted +ta vern +acti vely +- " +replac ing +no d +un lock +. 
" +âŀ ¤ +affili ate +to w +l n +happy newyear +di f +j m +green wich +contro versy +daw g +con dol +sav annah +compens ation +touch down +te o +amb itious +embro i +convic ted +iart g +bar ack +tr ance +testim ony +au dition +thum b +my ths +be x +que z +orch id +den y +entit led +hoo d +gr ant +in box +blue jays +r illa +smalle st +bur den +in famous +divi ded +boun daries +t ter +el t +wy oming +be verage +me sm +one ws +budd hist +y ana +as sad +is ms +bar rett +predic ted +back to +tw it +e there +cap tains +escap ed +ay o +lam borgh +gard ner +la ps +k al +adverti sement +insec ts +na po +am en +ac y +r and +g k +te h +k athle +tri dge +pan cake +at ro +pyram id +bu la +paral ym +gau ge +en cies +tom y +biscu it +but cher +quali fier +coun ty +ke i +po ols +dar ker +should ers +ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸ +sp re +( " +writ ers +g m +ðŁİ ĵ +k nit +hu ff +mt b +philli es +o st +den is +g art +licen sed +inter face +ex cel +d well +from the +co fficial +az zi +appear ing +fore st +n ana +ke ith +manufac turers +beck ham +) ? +e se +col ony +delic ate +ut ter +mc in +transpl ant +pre ferred +par d +ari e +hu b +po ds +perspec tives +pic t +del u +app er +be than +p mo +crimin als +femin ism +sh ack +circum stances +fel las +prote sting +wa x +sugge sted +t ator +dre w +om ni +fa ke +kath y +re b +del ine +ber ni +mi sty +ðŁij © +er able +break through +men swear +millenni als +chan yeol +la z +inser t +rep lies +phra se +n x +ihear tawards +audre y +gran ite +rac ec +ori e +ter ra +innov ations +britt any +at eral +pe ar +bio logical +sh ments +institu tion +m sn +frequ ency +d man +neg lec +t f +ste fan +fox news +ty po +comm s +sequ ence +car men +wh ites +econom ist +exe ter +se um +re sorts +cas ually +bun de +divi de +Ø ¹ +ga g +cre ed +reti re +cau cus +rapi ds +wrestle mania +tul sa +sunder land +fundam ent +o di +yam aha +v ary +intri gu +el se +be acon +an gie +tra ded +tran sm +g ents +kn itting +gal ac +ðĿ Ĺ +u to +sea side +hol t +re rs +far go +train ers +mon soon +b ale +sou ght +mad die +h w +co li +fr an +fav s +ðŁĴ Ķ +int ent +r ally +s bs +lemon ade +barack obama +bre ad +stick y +explo sive +chel ten +t j +as soc +ram en +hom ies +v log +mi ster +lor d +âĢįâĻ Ģï¸ı +aly ssa +sketch book +ru mble +cat ch +migr ant +discipl ine +un likely +chronic les +fl ora +sl ams +am id +s boro +coo p +ju mps +tran qu +mel is +sof ia +en ri +gab e +sy ri +nicol as +cha i +w v +be cky +foo ty +ta o +suppo se +ðŁĺįðŁĺį ðŁĺįðŁĺį +plu sh +ri sh +ðŁ¤ ĵ +k ha +satur days +ac cent +he c +lim it +carl ton +wi red +taylor swift +ðŁĺ ij +sq l +har ro +recipi ents +g at +go p +th of +amaz ed +gh an +ðŁıĨ ðŁıĨ +por to +cla re +di stant +na c +ohi o +ðŁĻı ðŁı¼ +mt n +anti bio +dino sa +me sa +par tial +b v +lear nt +lov ato +questi on +ex tract +gossi p +gi bb +niag ara +ðŁij ¨ +displa yed +so oner +ste vie +nug gets +ml n +bro m +tur b +give aways +stu pi +bl ink +c ili +conven ient +mo h +vi ve +f ric +cau se +cham ber +cu les +ne arest +is se +small biz +t j +canadi ans +smar ter +bra sil +ra re +que tte +w ha +cand le +at omic +ðŁijį ðŁijį +warri or +relax ed +stri ps +ne ur +k ka +r fc +jen sen +reco vering +respon ses +sal am +ortho dox +acti ve +ell ers +n it +âŃ IJ +metro politan +centu ries +vi da +gra ding +transpa rent +sim ple +do ts +superint endent +elev ator +autom ated +red skins +ima m +summer time +jona than +ge aring +michel le +confl ic +m ice +to te +publi sh +pa x +) - +na iled +á ´ +tele scope +ser bia +ba b +ape u +st ically +sen ti +r ats +isol ated +grou p +hat red +paranor mal 
+stan ley +ali on +safe ty +l s +ठ° +nex us +alexand ra +mas ks ++ + +tr on +au k +brother hood +brow se +mix es +sim one +mu sk +appro ve +lo la +ex p +per th +fu turi +un seen +d m +chel se +sc outing +o we +portsm outh +k ram +mi ze +di spen +su p +d lc +adver t +tere sa +is le +cy cle +met all +shi elds +marin ers +ra z +ing en +fun d +an go +jon es +o ka +mad den +broc coli +domin ic +situ ations +mer o +cric ke +puni shment +d b +sha king +ðŁĺ ļ +m q +ari ans +le h +cla w +we ds +d ure +ni el +j elly +gour met +tra ders +le vi +w ages +kne es +wi se +heaven ly +avi d +melo dy +z ack +ban anas +apprentic e +pro p +fun ny +o de +respec ted +me gan +fe wer +dra fted +med it +gra pe +us army +cru sad +vo cali +prepar ations +non sense +us age +th r +ro th +wiz ards +insi de +promo tions +mon a +red sox +si g +eleg ance +ch ia +univer sal +ãĢ į +ra ja +un ga +pol lin +filip ino +ak a +t sun +ik on +bi king +decor ations +z ac +cade ts +hum our +ag m +re ppin +vac cin +elo ve +u w +dia be +galla gher +az er +do l +a while +pro minent +wel sh +t ann +' ) +bi en +wa g +in al +c wc +wic ket +ur st +q anon +x e +out door +dun n +star r +co logy +ric ky +u efa +reb ounds +s music +inf ant +ðŁĻ ĭ +so p +u mber +hand ing +beg in +sor ting +ha sh +sp ati +re k +buda pest +black hawks +dele te +ro m +can did +auth ori +de bris +spe cul +inter section +marri ott +im ran +ðŁĺģ ðŁĺģ +cru ises +ram sey +rafa el +aware ness +vas cular +beyon cé +ru g +ðŁĺ Į +festi v +ar am +s able +bas il +p ill +flo oring +un beaten +implic ations +u f +w ound +for ge +poin ting +po ts +popular ity +ðŁijı ðŁı» +mani pul +s lots +deb ates +abs ence +ver mont +never forget +wri st +gl oria +ren ce +hu sk +mel ting +ðŁİ Ł +br aces +tim ely +transform ing +am ps +ma k +po e +ah an +gener ally +nd p +ale ppo +unic ef +pro fs +nor d +ma sk +jackson ville +v v +sh ells +bloom ing +oper ators +char coal +ne ville +ma gi +chi p +sam a +ir an +re forms +accu mul +ru e +æ ľ +web sites +ga on +devast ating +sto s +glaci er +ra pp +chipot le +pr a +or ous +rom ney +seas on +decor ative +c isco +dit ch +compla in +ll o +assu me +ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤ +n els +cent ric +ft w +car rots +tat a +can ter +per ience +li ers +demo s +bl unt +oper ate +reserv ations +le ah +sub stance +di son +an te +elec tion +v ue +squ are +non profit +ca a +f su +y am +ãĤ ¤ +v ladi +comple tes +mar i +philli p +ne ill +er as +ka it +men do +mahar ashtra +g p +dan e +provi dence +ther apeu +juven ile +me mo +in corpor +aa aa +seven teen +teen ager +à £ +or ns +wi de +cu teness +tw d +ff les +bar a +com edy +over time +y az +bar on +unemp loyment +ðŁij ĭ +exter ior +den se +cent res +match up +history month +artif icial +qu it +e sk +war n +cr itic +j af +ðŁĵ ² +inform ative +fu els +recy cle +nam ing +stri pe +sol ic +mole cular +dee pi +con vo +s sel +na e +de scent +ti z +accoun tability +ter ry +r ito +sl ay +em o +dem ol +sens ation +co v +tor e +round table +y ol +excu ses +ॠį +tur quo +hh hh +pod casts +cele b +me ssi +li o +man n +contribu ted +u z +gener ator +ele ts +veg gie +indu l +en suring +detro it +pun jab +tran spor +instru ction +ad d +por cel +pan eli +cir cles +persi st +clay ton +sp n +dog softwitter +is nt +sp r +retail ers +p w +hun gar +el ena +mon aster +gu atem +je ssie +an z +ra shi +fle e +car ving +fau x +l al +hen ri +d jo +du ll +s ana +lar a +glo be +cri mson +com pass +pau se +na b +lion el +ba ths +u fo +invent ory +sin gh +sat an +ðŁĩ ¸ +ce ments +in form +gener ated +bi den +av g +tas ks +de er +sa u +ja iled +pa stel +sc 
c +na il +steel e +per is +lamborgh ini +pur sue +mar gin +u ch +bo sch +dra in +cl ara +bo m +lat ino +web ster +rose mary +r ha +s oun +billion aire +not ch +percent age +con or +' " +hom es +earth day +h ort +big gest +di sin +wal ton +edit ors +im ma +om ar +equi valent +pharmac eu +ah med +cam eo +han ni +under rated +ge ment +micro bi +v oo +honor able +obe sity +âļ ¡ï¸ı +limer ick +invol vement +st agram +boule vard +bur g +blackand white +liber ation +fi ve +inter im +sm m +rival ry +cap abilities +stat ements +thu mb +ve d +sw ans +bar ber +e que +seren a +hel m +noo dle +sam pling +n awaz +sing le +thunder storms +sh on +in ev +ë ¯ +to pp +orch ard +bi an +ðŁĺ Ķ +door step +salv ation +marke ting +r ons +cle mson +ra vi +in take +stand with +sin a +ha iku +ple y +elector al +ph illy +la ys +electr ic +cap turing +u pp +er gy +believ ing +cul tures +es day +inva sive +ed ed +spee ch +end ur +viet nam +boy cott +pe de +deli ver +ðŁĴĸ ðŁĴĸ +mer chant +st ir +den ies +poc kets +o ti +cu ddle +ro land +mm ed +den ed +lear ners +hoo p +sour cing +h acked +di m +environ ments +ben son +jud icial +wor cester +pear ls +govern ments +arri vals +cor ners +tun ing +la bour +y m +or dering +le wi +i fe +hygi ene +thou ghtful +indone sian +campaig ning +princi ple +assau l +ru bb +at v +wil ly +en tre +il i +ph on +du ties +âĻ¥ âĻ¥ +sn akes +lo op +am ar +conver tible +bon ding +ment oring +max well +ethere um +destro ying +ax is +ca iro +fin nish +sho ck +ðŁĺ IJ +cal eb +com a +pe dal +co re +contin ent +el son +temp o +helsin ki +ac p +tack ling +st ated +bl a +dou b +sma shing +a ja +camer on +disru ption +warm th +being salmankhan +bullet in +o de +syrac use +ar an +mc gregor +bul k +an ton +confir mation +sp ine +im ran +instru c +jac ks +chi o +pal m +str e +embarra ssing +un t +elimin ate +to ss +c ise +a ws +oni sts +sh inee +jo s +ho se +li vely +opp onents +mo vements +recogni zing +sandwich es +sh akes +exerc ises +se at +profe ssion +merry christmas +lu gg +adopt dont +mar vin +byr ne +un le +he t +ku wait +rah man +aspe ct +humb led +gen es +f and +long time +) ; +cam pu +an gus +ðŁijį ðŁı¼ +q uran +sle eves +s lic +¸ ë +twel ve +your e +i ke +go gh +b st +dic tionary +reflec ting +to on +yar n +em bed +ðŁı ´ +re serves +floo ded +ver iz +du sk +estab lish +pro li +au d +ritu al +or bit +declar ation +recor dings +cam o +cas sette +good luck +cu tter +bo p +b ho +che ating +paci fic +ma res +tim er +col t +tr ous +tomor row +han sen +ci e +w ang +ban i +circu lar +ac ute +far mer +co ys +p se +ir ving +w j +haw kins +b ison +ur day +cru ising +o te +k ath +whi stle +your selves +ant is +sla sh +thorough ly +ke sh +ser ie +ex em +en ig +guil d +sh red +ho gan +ap o +ä ¸ +pu zz +ne tball +au ssi +panor ama +ws j +av is +ar ming +hum ph +brow ser +cri es +fo ggy +mat te +ðŁĮ » +it er +tal lest +by ron +cap tiv +je su +any ways +flag ship +p ton +we y +fay ette +financi al +f oul +solom on +jenni fer +cucu mber +ar gue +tex tile +wrest ler +john ston +pa stor +ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃ +cac tus +edi ble +re served +ric hie +met res +ingredi ent +h ella +un to +ch ol +cele bs +po ets +gra ham +hay den +coinci dence +b aw +communic ate +flet cher +/ - +tole do +ecu ador +coun sel +s laughter +line ar +at p +os u +jo el +ev ed +conqu er +ru stic +plic ity +recogn ise +room mate +cr acked +jas per +ph er +ðŁĮ º +wo ven +mo ist +ff c +ste ering +ni sh +stand ings +frequ ent +ar di +haz el +as msg +bau m +d art +si dd +nat h +ch ero +card board +c ss +n sfw +pa ir +ðŁĺį ðŁĺĺ +occur red +homeless 
ness +mal one +ph e +xi a +pad dy +decl are +theat re +b f +per sian +ta d +ax e +susp icious +lam b +mu cho +sen ior +st as +k ite +st ing +gra d +k af +wat ering +Ø ¯ +spi ral +th ms +educ ator +jer ome +of c +clo ck +su l +pe mb +.... ..... +park way +de aux +restric tions +m ons +need le +e j +le agues +water melon +am an +pl enary +max im +w ab +coming soon +bry ce +vi gil +super market +fortun ate +turquo ise +presi dent +li v +inter ns +feel in +fix tures +stun t +st aged +premi eres +lo k +prac titi +shor tage +log ne +ve c +con cor +roc ke +li g +com posed +syn thetic +di p +cam ila +ch is +j ou +su san +eye brows +supp lement +satis faction +moham mad +ti bet +house of +pu n +as sam +shado whun +psy ched +se duc +mand atory +her bert +sc allo +stream ers +proto col +block buster +produc es +sch nei +lau rel +tri be +time hop +pl a +mod elling +tv time +mtv stars +wi dow +me tric +ch am +con do +flow ering +ale c +d ms +inten sity + ¨ +mccar tney +islam abad +k b +f fi +ph al +anal og +f ond +h acks +positi vity +treat y +sub marine +conne ct +sel en +categor ies +cu b +organi ze +si k +quote oftheday +remin ding +am or +loc king +ðŁijı ðŁı¼ +comp ound +et te +b out +rec ur +fe rence +mi zz +tren d +hip ster +for tress +forth coming +preli min +o dyssey +ang p +del ici +even ings +ðŁĶ ¹ +i q +d w +da ir +kathr yn +christian ity +moon light +ha b +wh oo +f bf +se th +genu inely +pa x +char ity +deplo yed +b nb +bu cs +ju dg +con ge +plant ation +im press +car a +sc lub +sco py +land ers +compla ints +b ama +re build +x y +real ism +sh our +le in +brac elets +mer a +assas sin +an chor +ðŁijĮ ðŁı¼ +lin en +con fron +chronic le +comm ent +cat alog +il les +gor ge +me try +jung kook +love my +sent in +se em +fit ness +alli ed +ts man +digital transformation +pr an +lo ft +min ton +alden richards +en vel +cher ish +certain ty +zz z +rhin o +per kins +en rich +cape town +ome ter +sec tions +ske leton +def enders +ðŁĺ Ŀ +pen c +bri t +ja h +capital ism +ðŁ¥ ĩ +baz aar +re me +ex t +kk k +conver t +stor my +b ye +kar an +chry sler +ad os +pre ssed +syn c +ation day +dang er +bad ges +refu ses +em powering +ly m +ex ports +adoptdont shop +ðŁĩ ¯ +th c +awa ited +focu ses +fin ed +o at +haha hah +âģ © +n family +fi ona +luck ily +thr illing +ty ping +out break +di es +he u +craw l +ne sses +o ath +scri pts +gee ks +ðŁIJ Ŀ +p b +mathemat ics +al is +________ ________ +gymna stics +acti vism +recommend ation +gre n +wa in +cour ty +n apol +cau li +hor nets +g als +jo ckey +dir ty +at ar +enor mous +pe st +greg ation +an os +ii ii +def ends +black historymonth +at x +mb c +lugg age +wit ch +co b +la sts +cu m +gg g +ba thing +n ar +ce bu +ðŁį ĥ +navig ation +min e +re jo +ðŁİ Ģ +gif tide +re ta +use less +pu ll +defic it +al lu +ati me +it v +tr illion +pu e +ac ies +proce dure +l ori +jen ny +c ad +ul ously +dr ac +promo tes +ing the +can u +woo hoo +na omi +zar dari +ts u +be ir +sd g +le ver +we ber +ab ud +lun d +crow ded +deplo yment +ter rain +ken ny +ho f +witne ssed +lo ch +j k +bul ly +w ren +poe try +do ff +ww i +mo red +din i +cul ture +promp t + ¥ +maur ice +to pps +r m +cor respon +ab out +jewel s +gi br +eag le +ðŁĺĺ ðŁĺĺðŁĺĺ +l ending +sou ven +ç Ķ +contemporary art +establi shment +j ong +âĢ¦ " +gat or +patri otic +mc coy +v ape +human e +feli z +coach ella +re posting +ste als +fu ller +n ering +at ra +( - +bla ke +he ather +wor ms +discipl inary +rede mption +y ard +am in +" @_ +d nc +t ds +k appa +ne wark +comm its +spe ars +j ams +t and +msn bc +inter medi +aim ed +at ic 
+teen th +observ ation +kash mir +kavan augh +ou l +san francisco +re u +bel ated +cho w +pass word +st ills +deta ined +sar i +day ton +dar ren +itali an +ar th +amu sic +ar bit +w m +v m +he m +dou g +my r +a sho +pre v +vin d +bra h +sta g +ภµ +pre views +gu k +con taining +leon ardo +sad dle +ru shing +st av +lon gh +gam bling +ve gas +reserv ation +end ale +bal a +fl a +vari ant +he dge +bulgar ia +nat ali +we aver +sol st +encoura ged +ap c +as parag +ne st +cycli sts +fe l +ìĬ ¤ +overwhel ming +pey ton +j it +a post +mb le +ble eding +neighbour hood +a very +expre ssions +mac donald +gi gs +mon ds +illu sion +n ct +cam ero +over head +my th +ol y +vi o +et v +lau rie +unve iling +pri or +con n +iron man +di ff +day in +crit ici +con go +re vision +wal e +direc tor +p ines +black pink +gar ner +cur ated +manit oba +h ac +common ly +bar ton +.... # +mor tality +live smatter +philos op +shor ter +con vince +fre ak +vend ors +insi ghtful +el ly +sens ors +e led +s berg +weight loss +u kip +sp ur +priv ate +qu a +ss c +, ... +supervis or +advis er +amaz ingly +less er +at es +mah on +oooo oo +sar as +pmo india +waff le +un ders +toler ance +sculp tures +her sh +kno cking +smo ke +cathol ic +gri m +tra veled +fli p +ge off +dinosa urs +sle pt +scar let +ok i +compla int +ob sc +nam i +la g +cross fit +u fc +mc cain +refe ree +sad ness +pen ny +li eu +mo de +ki er +vol s +w is +el on +she a +ba o +son ia +cla ire +em manuel +moist ure +di gest +vi ii +t eller +ch on +access ory +night club +foss il +aw an +hu sky +ab original +brand on +ffici ent +cou gars +ste d +ad mitted +igno red +content marketing +ag as +v ase +execu ted +negoti ations +she ad +n and +tab lets +go th +ts al +d fw +on ep +protec tor +sp ho +gaz ette +andre as +ss er +comp ilation +ha v +contain ers +bro ker +soc al +porcel ain +hy uk +air ing +ðŁĴ ° +publi sher +scen ario +spart ans +re viewing +itu des +ed el +pear son +ba sh +mau i +a ad +ðŁĮ Ĭ +li u +ul ate +program mes +fav our +web design +real ty +motiv ational +cro sses +' ... +bus ch +adjust able +ar jun +mist ak +dimen sion +pi stol +weigh s +en y +unve il +indy car +gor don +f ade +fran ken +qual ities +bet t +loc ate +ker r +sp c +confu sion +ne e +luck y +bas es +dep ends +fire fighter +ol a +re t +mar oon +ðŁĶ Ĭ +w am +defin ing +whe at +bi l +é s +b hai +psy ch +ta u +ic ans +thi k +ob ile +inspec tor +ìĨ Įë +ill on +go s +ev angel +fa i +si st +voc ation +bur ge +chi stan +renew ed +enthusi asm +en ting +ag ri +ike a +m sc +aero space +sens iti +memo ir +hosp ice +co caine +der ry +mechan ics +Ħ ภ+tin o +reduc es +collec tors +in justice +supp re +v ana +ab un +nap a +su sa +os lo +e ff +en core +lic ence +ched dar +z al +moun t +ðŁĴ IJ +threat ens +!! 
" +archi e +fu tsal +scu ba +jo s +gn on +se xi +s official +compar ing +domin ant +tof theday +fa it +propos als +gi ft +y as +cn c +l r +ha b +reser voir +beli efs +gener al +mar ti +t d +est e +ì ł +wi l +ðŁij ¯ +ðŁĶ « +sp x +et work +excer pt +e instein +hir o +sil hou +team ed +per ception +corri dor +mental health +hin ts +ben ny +induc ted +sw x +wi desp +spe ak +cher yl +dru g +ðŁĺ ķ +h f +asparag us +myster ies +fitz gerald +off er +therap ist +care er +dam aging +ts d +per u +wei bo +y ay +phoeni x +disc re +mac book +bar ker +stig ma +sp read +roc kies +kang ar +bri dg +pa i +bi shop +ta iled +capsu le +ðŁĴ ĵ +ge of +roy ale +short listed +o ste +ash amed +ch app +key e +cl a +screen shot +austri an +nati ve +en ight +juli et +michel e +ðŁĮ ´ +travel ers +pi l +football er +win chester +ðŁĻ Ħ +azer bai +gold eng +organis ations +interpre tation +predat or +ofthe week +lo gan +pok é +mari e +cal la +t nt +cin de +ge tic +fit fam +gra v +ow ens +ðŁĮ ± +shoot out +sal is +commissi ons +co he +p tic +ni xon +hi a +amb ition +mar ine +cruel ty +t k +cru de +sal ty +jim a +mon go +ir ony +on wards +arre sts +strang ers +ig er +cycli st +ra g +exten ds +tra dio +bour g +mo i +el la +e able +lex us +au l +der a +histor ian +mor ton +ti ff +man ner +ko t +d k +po inted +mar qu +a an +en ey +du blin +on poli +em ili +secre t +fl o +âļ ¡ +ba j +ste ep +accompan ied +rum ours +dev i +purch asing +fi g +pu b +sch oo +autonom ous +go alie +x ia +autom atically +re vers +ter o +fu ku +titan ic +shoo k +sand als +see kers +exc av +nor dic +bigo live +ba ke +r att +z ak +ne p +ðŁĺ ¤ +cand y +billi ons +book worm +pp et +à ³ +sur faces +sc ars +phil ip +do gg +ci gars +co te +transl ated +cur ator +sin dh +han gover +bre wer +on es +el ton +ðŁĴª ðŁı¼ +mar cu +elli ot +righ te +di oce +ru ss +rail ways +grand son +as cen +apo logy +awa it +mob ili +re spir +parti san +oli vi +stri ke +yo o +white house +expre ssed +pu ps +bed ford +cul tur +fro gs +fly ing +cav ali +c ds +fri ger +street photography +re solve +tali ban +kan g +cru shing +ju m +ðŁĺ Ĵ +william son +tan g +cur ly +t man +veter an +fa ire +artificial intelligence +un anim +pre n +back drop +fr ances +oc cer +doro thy +work ing +ar thr +conver ted +day light +serv ant +pad dle +compla ining +thir ty +nad al +ak u +ibra him +ad dressed +p iss +green house +batt alion +si mulator +out lets +embroi dery +ðŁĵ ± +fis cal +ger ard +sas sy +ðŁİī ðŁİīðŁİī +vent ures +mer it +public ity +ðŁij Ī +sophistic ated +c tu +conven tional +condol ences +isra el +tra dition +ar an +te ss +gla d +ðŁĺĬ ðŁĺĬ +correc tion +ge on +am d +or ship +be ast +ch ment +ì ŀ +nic o +wk nd +wel s +cushi on +beli e +vo c +idio ts +under neath +pu ma +corn ell +en ation +lu l +swa ch +ab ig +u rer +mi e +form erly +ca f +er nal +chor us +juli us +sen ator +âľ į +wh ir +salv ador +ph d +uni fied +boo ster +graph ical +w rec +son ny +mi z +dere rs +s all +ven s +tusc any +wi d +y ong +kur ds +w az +trol ls +mac ro +cat urday +pre ssing +sa sha +cent ennial +gu sts +em c +be fore +den ise +cu st +ðŁĵ ¢ +lo oo +base l +eng land +y olo +ar du +manife sto +do ha +ì ľ +kni ves +bourne mouth +bi bl +bar b +al icia +Ø © +com er +cycl one +g it +ane ws +character i +vent ura +in tra +sf giants +hu t +be a +dar win +ell er +al v +re ese +bl y +kar an +conclu sion +man ny +fla kes +unite blue +nad u +co pp +ed ges +lanca shire +i als +o tta +philipp e +l ent +che e +ment ors +festi val +an ism +compli mentary +r j +pu g +d ine +we i +cli ffs +sar my +ti veness +treas ury +il and 
+after math +rabb i +ou n +bou quet +herit age +zi on +sur render +shen an +in ks +kar l +gh ty +pol icing +exam ination +ce y +per su +measure ment +hydro gen +lu han +âłĢâłĢ âłĢâłĢ +war i +о Ð +j y +fow ler +mis h +al fre +âĺ ij +bb naija +cat alogue +recogn ised +sa ver +hu skies +col in +mun do +si va +p ng +discoun ted +man utd +fre sno +de vin +prelimin ary +tro phies +pla stics +du g +pro cu +indi go +g ard +dy lan +pit ches +ground breaking +in son +bl ac +an thology +f h +expl ic +r ard +admi ral +so chi +la shes +splen did +en vy +ad v +sex y +festiv ities +stic king +bi b +thr ill +op p +ari el +botan ical +endur ance +fe males +br icks +vat ican +black pool +ber mu +br ough +roll er +bi d +sue de +sloven ia +mm ing +ml b +med alist +di ans +rehabil itation +ne on +s go +li thu +ram os +z ed +pi anist +inten sive +broad band +stu dy +peter sburg +lu ca +ah hhh +phys ician +dill on +tele com +gri ef +mu n +ac ro +si ded +s ly +blo ws +classic cars +tri um +ar gy +? : +h ri +marsh mal +âĢ ĵ +to pping +war saw +tran sc +preserv ation +b av +re friger +experim ents +ä º +gl it +sli ga +g age +fac tor +flav ours +br ony +sp o +cook book +carri age +aw ay +ny fw +on ian +w g +simp sons +ro lex +ðŁı ¿ +cro sby +ãħ ¤ +cre di +syn dic +pu bs +ali fe +poor ly +mac ed +ðŁĺ ŀ +behin dthe +w enger +n ats +ðŁİ Ł +rubb ish +procedu res +typho on +opho bia +er do +fu el +vi era +bu mps +millenni um +new zealand +lec tures +it on +mil ky +respon ded +ê ° +landsc ape +.. @ +bo ther +âĸ ¶ +z hang +huawe i +tu ition +s worn +in u +y or +pa olo +au ditions +ab il +malay sian +ho ps +fe athers +mp le +au ts +ã o +boun ty +ic he +ì ĺ +sh q +pin ot +ge ars +disapp ear +video games +t na +alzheim er +ðŁĮ ŀ +a ji +under wear +swit ching +sign age +o scar +ec on +dro w +cl int +pl ated +gun dy +emb lem +ho es +ici st +nel ly +juni or +road show +miner als +at le +alexand ria +ac claimed +v ell +shi va +ad he +en ne +amne sty +h ounds +councill or +ðŁĴ ¦ +aes the +part nering +influ enced +mag no +fl are +extin ction +civil ian +maje sty +va il +law makers +rac ks +mc c +ori an +sp ices +er rors +may er +co ca +pa i +s ooooo +reti ring +ba thro +ðŁĻĮ ðŁĻĮ +âĸ ª +su f +endor sement +buil ding +broo ch +pal la +arvin d +ag ent +kar ate +r hi +c tv +ta ine +um m +ba x +reig ns +uni of +enterpri ses +adel e +fla ke +at tire +bru ce +ba hamas +gra vy +sa in +che ek +tri vi +lo v +e en +bb lo +lady gaga +itt a +. 
"- +du stin +observ atory +eigh th +bloom berg +kh s +f cc +gi st +commemor ate +ve er +sexu ality +ed c +nic ole +vac ancy +u ser +son a +:' ( +dipl oma +t end +up grades +Å Ł +jura ssic +cardi ac +dr s +widesp read +à ł +dail ies +vend or +sim plicity +wi der +len ses +supp lements +de pos +ob served +vin es +parti ally +renew al +collabor ate +ali g +fin ity +ph u +zz y +pe tit +ðŁĵ ħ +z in +i gu +sm ack +fall on +ðŁĵ £ +back wards +comp onent +o so +compati ble +bin ding +zur ich +thom e +w ounds +ly ric +fresh men +sne aky +fi bro +di et +emplo yer +in sect +h ated +sch er +raz or +n sw +boo ker +califor ni +av fc + ° +preten ding +pep si +al is +un titled +k art +grand parents +e the +o ck +lux emb +visu als +small business +abdul lah +min ho +su baru +h ra +reve aling +heart breaking +clar ity +am g +sl r +** ** +âŀ ĸ +recor d +ici ary +min ded +ye h +exce ssive +knu ck +icec ream +tru th +ev ic +ta stic +ant arc +ren dering +, , +mit t +loren zo +st patrick +bound ary +zi g +vo cab +osa ka +fur n +tu n +gu l +s ounding +blo gger +utter ly +g af +adv ancing +l cd +mar gin +lifel ong +solst ice +sh ra +wa its +ple ar +bre ach +en ligh +ad er +itt le +c ation +ho on +stu died +?? ??? +k ash +ev angeli +ps l +wei ghts +met als +ty res +tur no +wi e +car b +g ale +se al +sun ite +am ic +patter son +á n +eu ph +up stairs +quali fiers +khali fa +apple music +ìĨĮë ħ +vau ghan +al ter +cru iser +mu a +t ana +kat rina +id ols +spo iled +secre tly +fi bre +part nered +um es +gi ov +com et +screenshot saturday +k eller +fil tr +fe t +con way +pe u +bad minton +gi d +m ound +don key +bu ff +lea ther +lar gely +bro ch +int ments +am use +r k +sto ve +impac ted +con t +cr acks +prison er +bar i +contrac tor +ori oles +domin ate +pol ar +am elia +dr c +ðŁijĮ ðŁijĮ +vi st +su arez +injec tion +blo oms +ðŁļ¨ ðŁļ¨ +sti ff +pay pal +sno wing +thur sdays +goo se +we dge +educ ated +weak ness +de cker +abud ha +bree zy +Û Į +hope ful +o bi +rai der +gh am +de u +se ve +par tly +fu t +infu sed +mer ri +than e +some time +hu e +me in +cre dit +sli ding +ran de +cher ry +dead pool +sh ol +ar am +under wood +sky e +distur bing +m nt +poli shed +guardi ans +ha dn +pic asso +ari us +ak shay +ir ri +j h +happ en +la kh +dal ton +at the +s well +mar sha +re h +cour s +j kt +top us +serv ice +r ink +hack ers +dono van +hor o +tc m +may hem +cha se +dev ops +ken sing +sc up +sh ere +quali fication +c live +ton g +n ancy +mar is +der dale +ber man +cinde rella +jol ly +ci c +loo t +collecti bles +hom icide +g ge +epide mic +su ites +mu ddy +gi mme +e rec +- * +tal la +lis le +embro ide +ðŁĩ© ðŁĩª +veriz on +ve ctor +be anie +arti san +ga in +flo res +vi gil +u so +ðŁĻı ðŁı½ +grin ding +gh er +air ports +respon sive +shaf t +can cel +ceremon ies +e me +at ari +bru shes +eag er +bo hemi +children s +yan kee +ma a +suspen se +mor an +mac ar +sun flower +cre w +vo id +ke ar +fashi oned +jen nings +sunday funday +sub missions +me ad +her man +wa i +crit ically +le um +baek hyun +for cing +co bra +ãģ ® +acqu ire +al k +ge ology +pri mar +import antly +ire z +bunde sliga +curi osity +sen a +stric t +con soli +win ters +ven om +chelten ham +ðŁį º +cen a +t at +ba in +glo ver +under cover +as ses +car n +memorial day +am eli +i rene +ch on +syn thesis +spe edy +mitsu bi +sla yer +compos ite +under stands +pe w +inter rup +hen ri +mor row +an om +thof july +g lee +thre e +ðŁĺ ® +and hi +ch att +renew ables +ye s +trans fers +!!!! !!!! 
+bab u +du ter +lo ops +pe ers +o ilers +pau lo +ic ation +h mu +war a +mer cer +hom eland +fu ji +ale y +year book +re m +re en +ab sur +bo is +] : +caes ar +shot gun +kur dish +o ren +ra e +anci es +ty pic +f h +def ault +re plic +lu k +trans actions +r ys +infan try +ðŁį ¾ +cho w +chick ens +ba gh +wy att +ay e +gg i +bre ws +ed itions +mi ra +commen cement +pre su +peris cope +ic hi +guatem ala +zam bia +pain ts +wit ches +wan i +un dere +cro y +vo ws +us mc +hear ted +theat res +shu ffle +le vel +mul tic +squee ze +fer n +app et +post al +mal t +on board +ld nt +co o +s sc +k ac +ðŁĺ ĩ +sc rap +mar cos +deal ers +ann u +mill er +co ve +ul ary +vladi mir +be ef +th ur +pick led +se same +bengal uru +mo tt +kathle en +hi st +no tor +dr ank +du chess +snow fall +e ff +tin y +j n +sy our +speci alists +scot us +bay lor +eve rest +mali bu +pre m +harm ful +l ali +b ates +g ye +differen ti +and ra +geome try +el over +black out +== == +ko ta +inter act +asi an +la yo +samu rai +fi del +exhau sted +gla di +pd t +spher ic +anti qu +guit ar +stu ri +ho pper +ang le +f ills +sla p +mi th +rod ney +ong i +in som +pre venting +cassi dy +ap ho +ore gon +lo in +ham mond +contribu ting +f n +gar ri +ori on +comp elling +escap ing +aim ing +plu mb +bi stro +be asts +concer ning +bo e +do pp +shop local +stumb led +âĤ ¹ +naz is +âĢįâĻĤ ï¸ı +gest ure +war ts +us open +hi ggins +char li +hang s +bom bers +° : +fe eds +c ch +st il +nic ola +ðŁĵ º +clam ation +tro pic +af ro +ou k +expen ses +der rick +al ine +fa w +reg ard +im er +sat in +thi um +ry der +pear l +te ss +mm mmm +sen ses +ðŁĩ ¹ +positi ve +exhau st +occu r +nor ris +lil ly +is les +direc ting +yo fficial +count less +sam ar +on stage +flo ck +mir rors +arch er +mo i +k d +vi v +in os +si kh +le i +sen sory +br its +kno x +chest nut +op y +coli seum +z af +di vin +adap ter +:) )) +tem ple +ku n +hel mets +t df +gu ide +m old +o ids +lu ther +he is +monaster y +sp ree +k lu +brit ney +jagu ars +gre ats +c cc +ky rie +machin ery +cric ket +re ro +ab o +aspir ing +semi finals +ale ss +sig natures +var d +me th +her bal +hol den +king dom +ap or +reg gie +ore o +palestin ians +em mys +sec tional +ro i +ney mar +qu el +cu ll +l ka +haz el +estim ate +ul ties +go w +be a +purch ases +bel ts +protec ts +m é +gue ssing +bb o +clau dia +fr acking +jon ny +el k +cel tic +al mighty +ra je +courty ard +ig i +can es +ðŁĴª ðŁı» +bank rup +le thal +âľĮ ï¸ı +graphic design +vad er +penc ils +rough ly +dan te +m fg +const ell +cam el +j b +bloss oms +en to +balo chistan +cine mato +ill ard +jer sey +con sent +dent ed +con templ +sch er +hol i +lou gh +st our +a yo +begin ners +cur b +v hs +a jax +du ff +av eng +dom est +commit ting +ai red +cha p +hedge hog +disappo inting +freel ance +in land +char ms +ðŁĺį âĿ¤ï¸ı +ai sh +m x +buck le +ti dal +per mit +bo ating +ra cha +kend rick +b ello +b hi +ple a +estim ates +l b +apo logies +jay a +bb l +ast oni +inter state +main taining +el bow +mu p +ep it +ðŁĺ ¡ +viol ations +def end +be h +sl c +am ir +pur i +ti um +fi fa +blur ry +scri m +ðŁĻı ðŁı¾ +ma ple +rel atives +âĺ Ŀ +cho c +con nor +⾨ ⾨ +whi sp +list ings +ma ze +than king +ri dd +grass roots +shi fting +desper ately +gor illa +den i +ju les +stra th +g ley +ja in +bu ick +t anner +ðŁĴ Ŀ +ga e +pri m +it ors +n ano +separ ation +armen ia +bor deaux +ðŁ ħ +pj net +bu rial +e bon +glo ss +re new +gri er +spe eds +comic books +sym boli +pur poses +ãħł ãħł +spati al +no table +ci on +n ps +ho ffman +nor man +rt g +du sty +situ ated +tr an +k fc +em en +nic 
kel +hast ings +sett ling +gr it +l ena +w aw +art s +gu m +ca regi +le wis +sapp hire +rememb er +embed ded +t lc +bl at +serge ant +el sa +boot camp +bow man +photo graphic +pill ars +direction ers +classi fied +no is +ve er +barre ls +wh oop +ðŁĺ± ðŁĺ± +fe male +petro leum +medi a +e fc +poké mon +ठķ +enthusi astic +var un +pro files +pedi atric +acci dents +con rad +jan g +jo jo +ac or +ob server +l f +live stock +for gi +fo s +el m +an and +go e +c ere +avoi ding +gri t +om an +thank fully +scat tered +nick y +cylin der +chees y +di ver +mahe sh +cav es +ear liest +qu inte +subjec ts +b end +gul f +vocali st +glu e +pat ches +un stopp +sny der +demonstr ating +pi o +hor ns +wic kets +and the +r ama +yo on +stra ight +bed time +or ang +bul lets +sa urus +min ers +inci dents +! ... +ðŁİ ¸ +ag ers +hand les +stat es +in ity +d ons +incredi ble +emin em +avi v +ru dy +moz art +folk lore +appli ances +mt l +fre y +di as +hu a +page ant +stri ve +im prison +bul lish +r ana +al erts +bb mas +hy per +derby shire +re cre +re dd +debor ah +cosmo s +law son +mel anie +psy cho +ho or +doo dles +sni per +shad y +man tle +canadi an +new year +inter actions +separ ated +cor ds +spiritu ality +ap u +it o +p ct +pel osi +rebel lion +se iz +wor cester +sec tors +ul i +san ta +Ð µ +ðŁĩªðŁĩ ¸ +bi ased +class ical +gam ma +dee plear +emer ge +back er +sur ance +hand crafted +ðŁİ ¥ +franc is +mill an +ic i +cro wn +wo w +stri ped +un fair +relax ation +³ ï¸ı +embrac ing +she alth +pale o +martin i +dist illery +wr ink +or k +na th +hay ley +cour thouse +si ber +sa di +quiet ly +mel t +m sm +me h +smart phones +rel ent +pp ing +war wick +co logne +gli a +cot ton +pro g +lon e +ip sw +star ters +expan ds +u mp +su ed +ski pper +infe ctions +ing le +à ¡ +cler k +demonstr ate +ac ar +ðŁĺĤðŁĺĤ ðŁĺĤ +ti bet +bun s +alo m +demol ition +ssi a +g st +[ ] +so ar +âĺ Ģ +ðŁĺ ª +ðŁĵ Ĭ +dee pest +beyon d +are t +att ends +activ ated +di mit +âļª ï¸ı +high lighted +magaz ines +rum or +az za +steph ens +dol ph +sho ckey +mat s +we av +mel an +serv ers +tra um +ku sh +æ Ĺ +bab ys +pa z +a al +la use +break ers +canter bury +ul ture +mi ri +euro s +tane ous +impre ssions +du tch +il d +gh i +pur due +adequ ate +l p +sy ner +ang ler +du rable +gal ore +ro wn +mg mt +ðŁĵ Į +lu cia +âĺij ï¸ı +zay n +bor row +. 
( +north umber +cru sh +eng a +su sh +extra vag +t out +ma hal +ali stic +ther mo +gall eries +es se +chi bi +attrac tions +lex ington +legislat ure +docu mented +resi den +brow nies +w f +st ool +plan ets +sho ppers +conduc tor +ms p +tr icky +fru ity +end ra +feel the +whi pped +hair style +re fer +oo k +oc topus +audi ences +ku mar +after no +op tim +c fl +ni p +gen i +alpha bet +ann ab +lam in +accep ts +l ng +ðŁĺ « +t ine +ac om +cheer leaders +t k +gr on +v g +k ung +ja x +dha bi +r ss +mack enzie +beir ut +clean up +gy psy +st ell +bur ger +hurric anes +educ ation +st ina +âĻ¡ âĻ¡ +unfortun ate +jere mi +bad ger +at ers +: âĢ¦ +ter ra +subli me +stu d +y mca +mr u +duter te +bren nan +bul b +mel o +yl on +hack er +c red +gu d +as an +pad illa +embroide red +vietnam ese +pione ers +projec tion +re boot +id c +an ey +pri mer +suff ers +win ding +p on +sto day +mor n +u ch +all in +adid as +eliza beth +tu ck +o graphy +ðŁļ Ģ +be g +os borne +ghet to +r h +cn n +ir ma +ma kin +cab les +mur ders +oc ks +inst a +al as +si k +cu ff +la re +foo dies +o vic +at om +geome tric +em pathy +ภµ +cent enary +newsp apers +administr ative +ðŁİ Ĭ +sti ve +contrac tors +le tt +tas mania +awesom eness +den sity +ve en +prince ton +frequ ently +re ject +gh i +modu lar +ceram ics +sh ag +ki wi +can vas +sweat shirt +an j +ti mm +napol i +il er +appe als +hamil ton +ma yo +we ave +arrang ed +whar f +occu py +b vb +as aki +ot ter +nor m +vi es +de tox +tion al +dere k +id ad +ad missions +constitu ency +u pper +woo t +allo y +se ve +lu b +un comfortable +ed win +ab re +d wight +ar che +virtu ally +sp ol +pri e +ai i +er r +swit ch +bar ack +se ok +cou l +wn t +pou l +o live +caffe ine +cardi ff +notor ious +de mp +ex cess +bar r +t ford +a jay +bump ed +my thology +shel ley +fal con +shakespe are +must angs +no ted +bon e +civil ization +sy d +par sons +un official +hy ped +sp ends +oppo sed +v ings +space x +noti fication +deci ding +bio tech +out si +sal ah +! . +fe d +ss y +c ms +bad gers +cr o +ela ine +n ba +dy our +n ant +honey moon +climb ed +conom y +ath a +m ell +ne bula +nature photography +juli e +bm x +inve sted +mon o +lieu tenant +wat kins +techn ician +o se +ka e +ì Ľ +mc queen +pre ach +trav eller +flexi bility +ze bra +reta iler +p ant +ben der +brand t +squ id +war rant +veri fied +cas s +pier cing +hon ours +t ying +mor ris +kis sed +op rah +panor amic +me i +splat oon +wich ita +ari as +gal li +indy ref +good times +athe ist +confe ssion +ow ski +re pping +ad ditions +mechan ism +z im +j ans +su f +cho pped +beg innings +vitam ins +ãħ¤ ãħ¤ +or th +po les +ru b +antarc tica +indie film +web cam +ket ch +bre tt +cle ment +her on +defe ating +hydr o +buc ket +wand ering +sid ney +future of +b inge +on ies +knock out +administr ator +syn the +l ent +jan i +bar ley +premier league +ner ds +cr m +bra s +bot any +evol ved +rot ter +ro wed +tum or +weal thy +Â Ń +mon arch +li shed +da hl +ðŁİ ĥ +bu ch +ken yan +Ø § +red ness +assemb led +se mit +hud der +shro p +ran i +lear ning +mor y +iti a +geo graphic +worl dof +f b +pho sp +boo gie +am ped +? ... 
+che w +dwar f +ar us +s sen +ru sty +recru its +h k +gar de +app lause +vol umes +invol ves +ta c +hand bag +trans late +ffe l +se ym +aqu atic +trans fer +zo di +and r +acade mia +cr ater +te z +ar se +adap t +col oni +snow man +mal i +hang in +di schar +oy sters +pho e +colon el +w ba +hispan ic +thri ving +sh y +ag les +sales force +cre me +so les +la fayette +â ī +ter ia +ach a +sp erson +go go +car ly +the ore +am ore +vo x +af t +ãĤ ¹ +stap le +mu ffin +di agram +ino x +su stained +av ent +me ta +arbit r +dec ay +ado le +Ð ½ +ec ol +ph o +n k +o cu +gr anny +ç a +luxemb our +stad t +alber to +le vit +am as +d x +or phan +co bb +as c +lo gy +immen se +chan ts +off line +p ent +bre x +w inger +plan e +i el +nichol s +ca thy +nar uto +low ed +/ // +ignor ance +cat astro +you ts +sch en +buil d +haz i +s ine +critical role +du g +dete ct +lo gs +en amel +stpatrick sday +ed die +co pa +cigare ttes +ho ff +kay a +la goon +ra pha +air borne +choo se +puer tor +ke v +gui ding +fro sty +bor ough +mir a +ðŁİ Ĭ +cade t +anu sh +yo gi +e ger +fl ing +slo pe +nin th +we ston +foot wear +f n +may weather +a am +pla in +stair case +witne sses +work outs +ro bust +dex ter +co hort +ðŁļ Ĺ +sp ell +ha ze +o om +organ ising +wild fire +cont acts +av on +min o +upd ating +ðŁį » +li thium +ing ual +k is +au ga +lo com +de duc +u da +th ak +boy le +mp er +hot tie +eri k +re vised +is la +travel photography +oo za +en qui +confe rences +clo ver +g room +cur ves +live on +per f +displac ed +bo log +xx xx +ðŁĺ© ðŁĺ© +te al +ve ssels +rain forest +cal ci +pan ther +gira ffe +ta sted +imag ery +pad res +day time +bas s +ri pe +opio id +nu e +vin yl +invent or +sen s +process or +mu t +gad gets +bibl ical +shann on +jacqu eline +car y +the resistance +ali en +n vi +co sy +bi har +fo ley +ren d +mu gs +fa ken +cl one +ni allo +gra bbed +chi hu +power house +n tt +chero kee +spon ge +imple menting +rh ine +le one +ðŁį Ģ +pret tiest +infra red +impro v +swit ched +tu bes +con tr +bl k +projec ted +be aver +yo t +bbcra dio +thi gh +per secu +apologi ze +w ack +po ster +oli ver +az a +lou d +( ?) 
+f the +women shi +spar row +blu sh +us able +sc ales +it ative +peu ge +ne eding +legg ings +glam orous +mat ur +c z +wat t +da b +tam ar +et sym +bau er +heart felt +h n +else where +bir ch +alu mini +hu ck +e me +j l +traf ford +d z +por tions +ana sta +arthr itis +esp n +ber gen +viol ation +yo shi +c z +northumber land +clo sures +ðŁĩ¯ ðŁĩ +smi ley +r w +tel ugu +inten si +gre gg +ve ga +dun geon +south bound +ba il +domin ican +semi final +chap ters +h itch +van ity +trans iti +recomm ends +sati sf +bar ca +queen s +( ( +de struc +stra it +ra vi +dess erts +in tru +har am +k os +fo e +fat ty +pais ley +magn itude +dri dge +com ey +schem es +vision ary +our t +down loaded +ðŁĻĮ ðŁı½ +gd pr +lan i +p wc +gu ad +nic est +stake holders +re ferred +george town +arvind kejriwal +schnei der +in doors +all star +strand ed +gen der +ze pp +ma sses +ðŁIJ ± +pati ently +bl dg +z ab +we arab +vi vid +he ck +d ella +sy mb +je opar +la ger +à ª +comb ines +ne c +br ay +flo p +tx wx +jo ys +pon t +pro found +sur round +mad hu +ma ble +ay r +te as +n sa +open ly +er nest +ãĥ © +to po +g na +anti oxid +ti an +e tr +c ello +ma thi +gener osity +b iting +man ic +kel sey +chee ks +ten der +w th +pron oun +ultimat ely +gu sta +ari anag +ger ry +ble ed +red dy +mic h +mitsubi shi +oper ated +sex ually +ma u +cl lr +vi ds +co c +mel ted +ðŁĮ Ī +q ld +ite ch +instru mental +end game +ðŁĵ ĸ +ener gi +brow nie +tam il +at in +domin ated +pra ises +fire place +sens ational +men a +k arti +un prece +ru pt +ori ental +mc cor +tour naments +scen ter +re eves +prescri ption +sam e +fra u +tru ffle +em bo +roman s +bla sts +techno logical +pr at +b sb +y ar +tren dy +ac l +al ad +ðŁį ģ +o hh +bankrup t +tho ven +regar ds +is er +war wick +vine yards +real m +niallo fficial +do ta +ge mini +to do +v able +¨ ¨ +la u +wre ath +ju ve +nat asha +le ver +lor i +hor ser +cc tv +air bnb +es anders +sin clair +ema biggest +high school +con test +optimi stic +t te +ðŁĴķ ðŁĴķ +ss d +ye e +hel ena +con sen +ric ks +jes se +an ic +ðŁİ ¯ +re acts +ro be +independ ence +vol tage +m ington +s ant +à¸Ļ ภ+-------- -------- +sentin el +ke tt +rehear sing +aaaa aaaa +sof the +stir ling +sear ch +wi gan +stand out +sna il +pent agon +Ä ģ +ch lor +cru st +net any +chemi st +disapp eared +ric ardo +sp iders +bo se +war ren +me ssing +bann ers +gu el +par ach +ma id +coun ted +epi le +bon fire +speech less +se tter +meas ured +rejec ts +nik ki +le ster +foren sic +fab rics +alo ha +pre served +wat ford +deta iling +dar th +bo u +car ly +... 
' +tail gate +noti fications +å ¤ +pas sive +trous ers +balo ch +ro ther +typic ally +à ¥ +sp it +wi z +sic ily +technic ally +ex pose +st age +hu bb +cre am +cap s +po ke +sle ek +ju ne +tempor arily +de z +awak ens +l ame +_ - +ji ha +tues days +advis ed +advis ors +exi sted +dis agree +news room +lo sers +world tour +dr ying +al di +har ness +foot print +hobb it +p mln +i ro +que red +asse ss +gaz e +sa b +th ian +í Ĭ +ti f +ob serve +ev il +dra wer +swee p +cor y +co dy +kyo to +cal lum +n inj +lau rent +be i +sket ching +custom ized +du r +regre ts +knox ville +ìķ Ħ +mess aging +grac ie +abun dance +bi dding +bre wed +fl ouri +therapeu tic +alt itude +ho gs +bur ner +elec tro +wonder fully +he ater +post pon +li very +r all +ad as +a ac +sau l +brook lyn +play house +âĻ¥âĻ¥ âĻ¥ +char itable +in y +z ah +compet itions +be av +plu gged +o is +do om +astron om +speci alized +max i +ta ps +cellu lar +depre ssed +folklore thursday +cri b +e mul +ë° © +fi gh +ru z +car lisle +spe ar +side walk +de i +depend ent +lac es +nh s +ðŁĮ Ļ +reali zing +net work +ric he +re gin +re fresh +st ral +pa thology +pla id +psyched elic +hin d +u ka +algori thm +lin king +progre ssi +fe y +d ade +hydr ated +b ant +fam ed +cot sw +bo ise +as c +rac ing +ja vier +ww en +mar lins +poo p +swe pt +toni ghts +we f +ani me +slo vak +âŀĸ âŀĸ +cla us +lem me +cli ppers +re ls +arianag rande +r te +ko t +thal apathy +hungar ian +zu ma +y von +is u +jour neys +clin ics +be be +ww f +n ws +super heroes +er it +sle ague +identi fication +mo tto +ba i +sour ced +ill er +ap i +pri se +unprece dented +dam as +tuni sia +dra in +undere stim +e ther +quarter ly +rewar ding +al ham +wolver ine +cab ine +hyp no +nad ine +hav ana +da e +ðŁĵ Ī +dr on +read ings +b ati +pic o +mer ci +iti an +wal kers +el ope +mi key +god zilla +bur lington +abu ja +social ism +at ility +sh ell +harry potter +g no +ab ur +re leg +fel ici +ro gen +neuro science +inst in +ath am +vou chers +j arre +fu se +def ici +monte rey +de port +mid day +pp ard +fre ed +ame ter +wil t +n ingham +pr att +liber ty +slo gan +o to +pr i +co ated +c pd +ne tt +il las +mal awi +evol ve +accessi bility +ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥ +or nament +b p +el is +son line +chi ro +fl ick +ib m +ar ak +en ables +gar land +san e +cu ties +tri p +rotter dam +n ys +lam ps +lu cas +bo g +ra ils +travel led +hic ks +en u +sab ha +scru b +hi er +hart ford +fo o +fer nandez +tre vor +mat tress +appo intments +ale j +fe i +o logist +saf ar +oc ta +sr c +sha un +ambi ent +dri c +bi ker +she e +must ache +h ta +bo one +her ty +car dio +bra kes +rec ital +consi sts +overwhel med +cau l +robb ins +im it +al th +ur l +bi bli +on ne +black livesmatter +diffic ulties +tel ang +tall er +ðŁĵ Ĩ +deb ating +bur rito +mo vember +strength ening +bo e +te stam +mirac les +base ball +re nee +ðŁijī ðŁı» +al fa +âĺ ĺ +unstopp able +ec s +g mo +giftide as +path way +fen cing +ðŁİ ¤ +b ham +ra s +sk o +d led +thel ast +magn um +bin ary +wil de +wil der +wh ati +barbe cue +h ism +can oe +kur di +eli ve +advant ages +mad ame +bi er +mis sing +enter tain +air force +y ama +c is +hash tags +j is +ve il +dream y +ten se +may ward +ch ateau +hunt ington +âļ ĵ +v all +up on +bl ouse +dun es +ðŁĺ ´ +fert ility +m ole +curren cies +st u +ber lin +toa sted +div as +wal t +lar k +por a +hit ter +um er +chil led +bal ancing +fa is +y in +or tiz +east enders +h ate +ur al +ap ril +tim el +à ± +per o +sto cked +respec ts +th t +best friends +giving tuesday +be ad +inv ent +im i +nap les +comb ining +tok ens +thir st +ma sc +par 
rot +sp u +dent on +* -* +t res +subur ban +wid th +si ve +con tender +siri us +lo k +troop ers +outra ge +tur bo +frag ile +me ssed +do h +disc ord +netany ahu +re sign +forgi veness +mo han +mun ch +cam ou +identi fying +enab ling +hot ter +thorn ton +jai pur +ar ya +ðŁı» âĢįâĻĢï¸ı +mu staf +maj ors +o ke +du ffy +roh ing +til t +ðŁĩ®ðŁĩ ³ +rock star +she ep +hend rix +ra v +in vention +do u +lagun a +gru mpy +sw is +im pe +) ' +you ths +bun ker +st ache +oppo se +indi es +acceler ate +ml p +ed en +w ann +k ail +akshay kumar +su pt +pol ym +midd leton +extra ordin +wil son +australi an +alumini um +way ne +alum nus +mat ics +gri m +er nie +opp a +competit ors +rand all +h ence +decla res +pre aching +sha he +can e +sustain able +stap les +le dge +ad ena +doctor al +bur gundy +decor ate +ren dered +ri sen +pr ank +di or +bee thoven +flo or +ac com +to t +ho dg +touri sm +say in +objec tive +mar kers +premi ership +en abled +camou fla +gi ant +Ñ ģ +smo key +ric ket +pan g +de pending +s ation +evol ving +inter cep +cen sus +tof the +re en +mendo za +trum pet +marke ters +an it +ðŁĻ Ĭ +north western +v la +foto gra +blackand white +che wan +wi g +tro om +ginger bread +k n +ro mero +n fc +or chi +fun ko +sour ce +f s +ra ped +o st +tar ot +ann ually +ðŁĺ ¬ +r ill +del av +.. !! +se s +can n +medic are +ph el +ape x +guardi an +rema ined +r pm +a ñ +story month +instag ood +neighb our +p ing +sem ite +my stic +as cot +mat er +hand ful +dang ers +ti d +ana heim +opol y +sh allow +nami bia +tor ia +procu rement +big bang +announ cements +prosecu tor +beng als +sal le +en roll +ga stro +sugge stion +ba k +ha ul +budd hism +berni esanders +flu te +fati gue +cyn thia +cho i +ir win +gu a +str ous +h p +ba p +satisf ying +play a +ðŁİ ¼ +inst ap +al ice +t p +irri gation +ðŁĩ¬ðŁĩ § +in tric +clu es +ple x +sa x +he pat +dump ed +signific ance +by u +medic ation +pro v +tough est +corn ish +âŀ ľ +kel ley +u v +si zz +si bling +me st +di stor +diplom atic +aun tie +b hat +son ic +bren da +pump kins +ro ch +black burn +ur ged +shi a +arrange ments +floo d +sa unders +lec turer +nou ri +popul ations +diplom acy +consist ently +ðŁ¤ Ļ +t mund +cauli flower +l ily +vocab ulary +vari eties +coo ker +up town +qu ent +mo sa +re inde +velo city +spru ce +social medi +i ber +volun tary +proce ssed +bal tic +y ang +leban ese +d p +dol ly +arrange ment +y uri +cran berry +kal yan +elev ation +cli ff +pu shes +ìĬ ¤ +sil ic +co wx +eter nity +sla ves +vine gar +glou cester +con tained +breaking news +aga inst +renov ated +norm andy +hero in +ys m +mo ds +gre ek +un di +tren ch +v h +encoura ges +head ache +gr ange +: ' +ever green +Ù Ĭ +reck on +ab used +th ru +cho ice +ti dy +col der +scho ice +ha in +bru m +li ars +bre it +yor ker +sh ack +he idi +micha els +sco pic +fasci st +play ful +ca c +yas ss +sh ad +.. ? 
+qu en +ram irez +clif ton +pr s +best fan +âģ ł +gener ating +head set +disappo intment +abstr act +bo iled +paren thood +azerbai jan +exhib iting +bom bay +oli vier +ko so +un lea +mat ernity +iz er +si ves +r hu +col l +saskat chewan +fre akin +de k +na g +stab ili +ðŁį ķ +organi zer +bo sses +ar u +u va +at able +ta un +after wards +fert ili +ver ge +az i +mor ph +๠ģภ+jer k +cosme tic +ko w +stru st +ap ache +post cards +for mul +ì ĭ +spin al +jack pot +elec tri +Ã Ń +lo y +gra der +diab lo +ar di +he sit +f w +arch ery +pa sh +the ories +repe al +re live +per cy +âĺ Ĩ +im in +syn chron +sham poo +coup ons +o to +la i +thou ght +luxembour g +mo v +ðŁĺ ¥ +ge mma +se ated +m ga +strat ford +un certainty +shi fts +est o +fo ol +fire arms +cor rie +ki ki +appa rent +p ills +olym pia +fi d +elev ated +de cks +ignor ing +av alan +ro v +whist le +p tsd +milit ants +robo tic +pac ers +quil t +bankrupt cy +lic h +per cussion +celebr ity +al s +( ; +su t +pokemon go +h g +off s +gibr altar +scre ams +billi e +gen ome +mar in +be ams +arch bishop +em in +bedro oms +g ated +ol ly +warran ty +at own +cudd les +gun na +k ic +vi ve +cy mru +nar row +pro b +le o +refe rences +manufac tured +cho pper +brun swick +sem is +don ia +r ye +man o +hur ting +? # +hol li +investig ations +c els +ðŁĵ ŀ +le ster +temp les +sto rey +mc mahon +toi lets +wo of +ï¸ İ +le verage +at om +night mares +victor ious +haun ting +custom er +ag i +yo ongi +mon ty +ver onica +w ur +inti mid +blan kets +volu tion +j m +âĺ İ +am on +jud ith +ðŁĺİ ðŁĺİ +distr acted +dri p +hurric ane +and es +revel ation +tro op +ab leg +col lin +tibet an +wor rying +inter nationally +eat er +camero on +brad or +y uk +ðŁĴĹ ðŁĴĹ +tra k +slo pes +ci er +ne a +ol er +ta ka +albi on +volcan ic +am n +a fi +ob stac +face time +ger ing +n pr +metall ica +organ ic +ðŁĴ ¡ +ki dd +d ances +pemb ro +wash er +m its +om er +emo tionally +tan go +ip o +do cks +scan ning +spec s +tho m +the ology +emer gen +om i +g pa +selec tions +un necessary +ima ge +ter s +induc ed +gi gan +rent als +supp lied +m fa +shan kar +lat er +pa jam +cla ve +Ù ģ +ma hin +carl son +avi an +ano va +kati e +aj ith +design ated +chocol ates +investig ators +gla zed +prin cess +er ry +ra gn +ou rable +hr u +sun dance +peuge ot +steam punk +gh lin +gre ase +hi res +z ap +per ce +j ill +tom e +he hehe +joy ful +mae stro +ni shed +gene alo +v ich +p its +fox es +good man +emer son +lo bes +con verse +o ats +thom son +ra him +mal ware +ah i +man kind +re sin +im g +sw ood +kin der +sc roll +ar a +sak ura +ro bbed +xi on +ny a +c ism +ce dar +be in +mour ning +tor to +heath row +done gal +bar b +hydr ation +k or +elim ination +su pdates +hill s +appe ti +star red +ko m +gw en +dd d +cra y +sc anner +personal ised +seren ity +re design +meta ph +box ed +judg ment +no se +ë ¹ +er ad +ac ne +supp liers +ener getic +v om +as ap +ðŁĶ ¸ +ir vine +hat ch +la ss +ad ren +waff les +accur ately +ici o +itt le +se un +occup y +web cam +thene w +ent es +ga i +j w +accoun table +vis or +ir rit +licen sing +hudder sfield +gen ie +ðŁİ ¾ +atmo spheric +ten sions +spart an +clif ford +ol an +north bound +ame en +cen sor +u el +ster y +$ $ +far rell +hy ster +cl t +se dan +rep lied +descri bing +micro wave +sla b +pro sp +assi sting +ru bio +e than +hh hhh +gu ay +z man +ra ise +roll ing +o e +n ile +ambro se +scar borough +hero ic +coo ks +mor t +chop ra +ðŁĮ · +to b +shav ing +stac ey +dor m +motor sports +wi ki +fol ds +sp iced +stress ful +liter al +fu dge +pe ggy +wa ite +tre sses +se sh +pr ic +ðŁİ ħ 
+fri ght +r va +mumb ai +po m +tt v +cel lar +tom e +andro id +dor is +tsun ami +tin der +o ec +m wc +dor tmund +no thin +l iti +so u +believe in +at u +kno cks +mag ni +ss sss +ro hit +ine ws +ang i +m andy +ke ttle +intermedi ate +av ant +cur l +endor sed +ori o +ur t +consider ation +wi res +shel ters +b ino +vik ram +imple mented +ly dia +bu k +paro dy +c news +under graduate +canu cks +sam i +polit ically +ro tten +gh z +tex tiles +over load +moder ni +recre ational +fli r +bat on +typo graphy +ov ation +intrigu ing +pilgri mage +al ge +ad ays +tcm party +sp elled +cur ls +boo ze +ste m +ann es +ir ls +spon ge +sho pper +sig nation +bra ss +mi stress +le ah +beg inner +lau derdale +augu st +pre school +ta ping +tai pei +execu tives +b d +rhe tor +esc or +immun o +deeplear ning +stat ues +it us +manu script +ly ric +cor vette +mol ly +la ge +de p +cn bc +le st +je ssi +fi fe +griff ith +oppo sing +ran g +dr ills +respec tful +p ity +d ell +har ding +play boy +blo ke +shut out +k ili +o sp +se attle +bc poli +mis es +journ als +team ing +es ther +fre ddy +Ķ ï¸ı +metr ics +no tre +gar ry +for ty +navi gate +perio ds +bened ic +j id +da w +ance stors +restor ing +con g +aller gy +tit anium +c ence +lean ing +ab bas +v ast +uc f +roof ing +e man +seve rely +vo gue +ve au +in bound +d z +tane ously +stret ching +man chester +dr yer +dav is +kan th +the game +it ted +re tain +el les +conge stion +frat ernity +ol lie +lo ki +fre ely +cho o +pon y +sc ep +tab ly +bal t +rock n +di me +lo gging +ðŁį · +ad u +ha voc +water ford +char is +swee tie +run ning +ner d +erdo gan +z ara +weigh ing +fif ty +pre cise +low ell +kurdi stan +r yo +or th +syn th +lin ers +phenomen on +art illery +il legally +constru ct +nostal gic +gar th +al ta +shel ton +a sean +w ander +dur ban +di versi +bon o +cl on +le man +sh un +obstac les +appet ite +fe eder +respir atory +di xie +formu la +an to +so ber +extin ct +au c +ing les +legitim ate +; ; +min nie +ipsw ich +dram atically +ðŁijı ðŁı¼ +ingh am +milit ary +mon et +us navy +for k +dun no +play er +q otd +st oo +ex or +ethiop ian +film fest +pe red +c ate +sau di +in ner +sin cere +tion ality +ale e +de eds +cooper ative +ir onic +cro cod +br ary +post season +cam per +can ary +e in +exten sions +nb d +sher wood +spo kane +hu mp +jit su +ê ¹ +dar yl +p si +stab bed +offer ings +expe cts +cav al +body building +fr aming +f ca +ye arly +bom bed +sk il +resear ching +jud iciary +gree ted +tu dor +mil o +innov ate +ðŁĺ Ľ +r hs +ru by +contribu tor +fam er +soci ally +m lin +fi ery +ut ter +beau t +it os +de voted +rain bow +bar ney +pe ren +ar jun +r na +gab by +ut i +hann ity +pick le +ser v +qu akes +pp e +fe m +wh itec +j n +victor ies +ðŁ§ ¡ +gol fer +congratul ates +resul ting +mechan ic +ur ve +cen tered +kie v +an s +in cub +< < +c mo +bestfan army +dap h +en ham +on cology +ku sh +t xt +ori ented +fashion able +c sr +sa hara +r ack +pd p +han son +ภĩ +ti ers +ra r +pan am +in sky +sa hi +testam ent +asth ma +in her +fisher ies +or der +ho we +gall on +ep is +suz anne +drow ning +paneli sts +ðŁĺ ² +ë ¦ +al ach +commemor ative +at tribu +ðŁij » +mo o +visi onal +week sary +gu st +ak in +poin te +ee e +di spar +ni pp +dent al +st all +pi an +bor e +ul ster +tic k +ir r +tae hyung +micro phone +bermu da +ga ard +el er +plumb ing +hu gely +âļ« ï¸ı +race way +cam bridge +mar cel +burn ley +to ast +holly wood +fa sting +me red +hib ition +ca pped +benef icial +ow ning +cont amin +arab ian +to on +cap ac +hul u +sm ir +nutri ents +se in +graph s +con ditional +ðŁij 
ħ +or ac +play in +nor the +tor nad +mar ian +ju mbo +lex i +incredible india +road to +uk one +confu sing +sp h +shan k +pi ed +mq m +positi vely +sher ry +path ways +consi ders +tof u +argu ments +resil ient +che tt +with dra +ter o +ated ly +sw ana +he b +fli ght +har ley +decre ase +kind le +book shop +³ ï¸ı +marty rs +sm ur +mc cl +concer to +sti me +rejo ice +app lau +cle ment +mer kel +jai me +im mortal +isle of +mar co +youtu ber +stal king +me too +st ack +sp ouse +u st +lu v +âļ¾ ï¸ı +eque strian +ev ing +fl in +nick name +the big +as ar +st acks +wal ker +bor a +kidnapp ed +hur ling +humb old +rec alls +co pper +ann is +se o +mer ger +mu ir +ad dy +ðŁĴª ðŁĴª +be x +cr acy +con an +congratul ation +mid st +âĻ ¬ +for bi +op tic +cr ate +crocod ile +mad agas +secur ing +ast on +o gue +savi or +salis bury +love it +fuji film +cast les +as st +ar rows +sp acious +tr s +poly vore +progre ssion +m ri +nel son +bi m +indic ator +o da +pe pe +re signation +gu t +sne aker +log ically +az y +are lla +te aring +jo shi +ssion ism +q pr +mari ah +p x +ble ed +mi an +med ley +we iss +ker ry +gat ory +at al +madi son +av enger +nab y +pl and +gi les +fresh water +d ington +ta j +demonstr ates +n tv +bul bs +sunday morning +pe ake +souven ir +wa h +ton nes +m kt +complex ity +con den +ross i +b ing +y ds +su k +n go +mid land +ol y +life is +ri pple +mo reno +dd ers +tu s +á ĥ +bou l +x a +hol dings +wn y +shadowhun ters +ke i +asp ire +m ous +ow en +so ak +skir ts +moun taine +stor ming +ch rome +ri ots +sar ato +amaz e +less ness +nav ar +crit eria +ra fa +indul ge +ay er +por to +nam o +........ ........ +yi elds +val le +j h +mac ron +sa ins +dur ant +tra ilers +wo t +confeder ate +sh rin +id ol +form ally +ten e +motor cycles +than g +no de +bang er +dal y +p ats +enroll ment +au ctions +at al +ar bor +lo gos +de arest +trans action +dom ingo +fle a +ser mon +de ck +sin cere +questi oning +juli o +was p +pre tz +armen ian +k ham +inflam mation +picture sque +acci dental +film makers +ðŁĺ ļ +ðŁĴ į +ca sey +so b +yee zy +good will +parag ra +ss ly +fe ather +dy ed +assassin ation +na de +b cs +app lies +femin ine +fe u +ext ent +depu ties +l ack +psy chic +go i +kill ings +pse u +ðŁ¤ ª +un c +mar l +tan e +mck enna +sur fer +influ ences +free way +hack ney +mal aria +el and +te au +rema stered +Ø ± +raz or +gg y +cor ro +lak sh +fla ir +honest y +hoor ay +de pp +am c +wedne sdays +q a +ed its +- $ +se villa +dou bled +human ities +c cot +som os +r ine +af a +si oux +re construction +wel ding +th reads +am ish +encoura gement +po der +bo ck +bal m +p tions +stand up +accompli shments +guar ding +convic tion +ac ion +napo leon +depic ting +att ack +su i +wear able +âĸª ï¸ı +pot ter +esc ort +vis e +to ts +bo on +event profs +angu lar +womenshi storymonth +bar row +sch i +ac comp +ti k +l end +kensing ton +wol fe +st acked +cra shing +exhi bit +wing ed +sab rina +ma sa +k ms +alway s +et t +pla sma +counsel ing +pick les +nfl draft +mr s +inev itable +coura geous +staf ford +writers life +ho s +e j +gh yun +trade mark +adri an +influen cer +coron ation +ra ging +explo red +usa f +excep tion +eu x +tan ker +sw ami +pac ket +ðŁij¨ âĢį +f en +she en +a ero +j l +re gal +nw t +au ster +meh ta +char ge +a ste +b ate +inf eld +racec ourse +collap sed +fle ece +z il +al lie +alternati ves +geor ges +ðŁĵ į +quir ky +fc b +nat geo +philanthro py +bra i +every day +ðŁIJ ° +ach ers +ja an +fin es +q i +fisher man +distin ct +gri mes +nation alist +comm ence +ro wn +âĢ ³ +z ing +f ter +hr w +baro que +bl 
ender +kitt y +hoo ks +c ited +w anda +consen sus +reinde er +an and +supp ly +me ds +v n +ol ph +rat chet +shel don +secur ities +ë°© íĥ +cro m +mosqu ito +j eric +im mac +dimen sions +â ¤ +di ssi +sponge bob +dami en +steven son +jo anne +del ish +yi kes +than x +surve ys +postpon ed +alco holic +al ised +ðŁĻı ðŁı» +do ch +sen tim +mered ith +com pares +b ago +happy days +mo ss +ãħ ĭ +ne c +gn ment +frustr ated +comb in +ri v +ec lec +col lo +compli ment +actor slife +ct to +nic ar +op hon +apar the +man t +ja de +trol ley +optimi zation +eye on +eco logical +qui st +ep he +ॠĩ +cin co +appo ints +old school +c pr +behavi oral +min aj +:- ( +tag ging +ev al +jo aqu +ðŁĺ « +ha k +de me +jama ican +so s +hy att +hand book +libr arian +hanni bal +pump ing +ch om +f man +ga i +hu ll +respon ders +green ville +n us +vau gh +ðŁİī ðŁİī +ta xi +gold berg +man tra +te ase +forbi dden +metho dist +ati vity +* *** +ec t +mc gr +Ħ ëĭ +se b +amid st +disapp ear +thy ro +phili ps +er ina +v icious +stream er +million aire +ma p +str ick +hack athon +gh a +ed ic +mi ka +pe ck +ill i +anto ine +ar ca +op tic +ma ure +ðŁĩ¦ ðŁĩº +cla shes +man ly +âĺ ģ +al var +and res +me i +el m +ww ww +al tered +l te +ê¹ Ģ +mo jo +for rest +thal ai +non t +spee ches +acknow ledge +ign ite +x factor +ðŁ¥ Ĥ +mead ow +disru pt +debu ted +scrim mage +pharmaceu tical +fi dd +found ations +philosop her +et al +publi shers +bo ys +c ke +ru gged +opti mism +re be +phil harmon +nar cis +ral lies +lu is +go blue +fol ded +un acceptable +optim al +li sa +pol aro ++ . +en za +âĿ £ï¸ı +mon opoly +grace ful +dair y +du a +diffic ulty +judge ment +o si +mer sey +flu x +new found +ter ns +dimen sional +in vic +al ba +am it +abudha bi +alger ia +autom obile +the ad +lo tion +acceler ator +vac ant +iti on +lu f +al ic +pl l +bla zing +ba z +sen e +ðŁij ¼ +villa ins +direc tory +eis en +to ck +broch ure +ri pp +hb d +zayn malik +nic he +lo lol +certific ates +mor se +fac up +x ham +un wanted +im ports +carne gie +fan sign +mo u +r alph +destroy er +sw ing +trek king +cili ation +pit bull +g aps +ho well +defin itive +mc le +f ps +et z +bol ly +lyn n +gan o +at ure +fur suit +co il +na v +but ts +tro jans +eu re +en ko +sch umer +horri fic +install ment +br b +subur bs +a bel +vi r +de sh +cun ningham +ðŁIJ » +span n +sch we +ke mp +tr u +ste alth +qu es +le w +deli ghts +ko ch +hu mili +cr iti +il t +sp ells +mi ley +car ic +ðŁį ´ +lc fc +substitu te +oun g +? !! +af fir +predic table +class of +er r +cy press +chand ra +age ing +__ __ +ther land +don caster +el in +yo shi +sail ors +har ris +jo anna +niger ians +h ers +pla gue +pro cra +k no +can ton +busine s +un h +pra kash +c in +bow en +co ating +m als +be gging +smith son +ponti ac +sp ies +dam ian +pl ine +und ant +al ta +one ss +shame less +da q +bb m +wal es +stam pede +ser um +Ù Ĩ +cataly st +x n +ab sc +free zer +ch un +ari os +mc cre +fore head +he ars +damas cus +tac oma +ardu ino +encoun ters +stan ton +lg b +ab as +" .. 
+ke te +drac ula +ele m +g ne +zepp elin +la brador +pul p +op tional +or n +russi ans +san itation +hil ary +etsym ntt +pen alties +au st +ig ans +olympi an +medic aid +vers ace +va pe +re stra +pe ep +sexi est +st alls +di le +the a +punjab i +pupp y +tuesday motivation +ðŁĵ ļ +the flash +roc ket +mo dest +chihu ahu +on na +k sa +hur dles +ca ve +fail ures +sp lit +bo ho +gur l +disappo int +ho ward +nug get +fran z +stal ert +kaz akh +for getting +sch ri +ag ate +am at +eve rett +du et +veter inary +juli an +ch ills +bra ve +ghost busters +lan do +gre ets +profit able +d é +ti r +ze e +om en +pd x +gray son +har i +fix es +stab bing +swim mer +symb ols +compli ments +po se +func tioning +th nx +gi r +corpor ations +bar low +lo e +off season +distin ctive +marvel ous +nik on +enri que +ky u +ja ws +amo to +lom bar +travel blogger +fa h +ouri sm +tri stan +so e +ce ase +ðŁı ħ +z ac +mck enzie +taxpay ers +swim suit +bl o +les ley +kan sas +w ks +ki el +provo king +my les +str ing +kangar oo +galac tic +fif th +s ke +we ir +ll is +mat ory +ðŁĩ ¿ +un ci +re productive +roo ting +ti des +gad get +.... ...... +alex ander +bow ler +scre w +apo log +eri ka +wal ters +shet ty +lan e +ban ter +as ant +me so +v ain +" "" +us i +fer din +accomp lish +man sfield +bom bar +collabor ating +cla p +it ure +s da +smo ky +na k +im person +car la +com ra +bur gl +lo co +ti es +in hi +trac ey +se is +diss er +rr rr +dra y +prote ct +cor ona +hun ger +ck en +c eli +trou bled +predat ors +fic tional +shav ed +riche st +metab oli +ful ham +gro oming +mono chrome +wa sting +as co +ast e +ti sta +remedi es +ung soo +south end +perman ently +bu mble +procra stin +ident ical +practic ally +ma scul +su ke +assu red +val erie +devi ant +grizz lies +thi er +pur a +ne pal +not ts +bil ateral +spo il +car mel +cine matic +ph l +ni fty +ma o +hypo cri +la ser +pan try +mathemat ical +el isa +coordin ation +bel mont +a it +radi ant +bo iler +man g +f ag +cr c +h ams +br in +â¬ĩ ï¸ı +famil ia +âĿ £ +sab er +ru pert +gg an +rit z +mic h +sal ford +le vi +gra l +ðŁĴ ¤ +n ino +ce d +business man +ul tr +sim ply +compre ssion +pa ins +hal t +ë°©íĥ Ħ +landsc aping +n f +croo ked +er d +itt in +ddle ston +sur passed +ino a +da g +bl en +exten ding +at ing +al gae +ball er +u mar +snoo ker +col lu +flo wn +thu b +ridic ulously +ki sh +op le +di re +as ser +ari sto +sc iss +h ating +trou ble +syl via +suc cul +plo ts +sincere ly +al er +laure ate +br ack +att n +rif les +me to +collec tible +cu omo +conte stant +consist ency +ant z +rang es +abig ail +de b +mini ster +grow ers +an oo +hoo ver +dream er +nu cle +resear ch +mi y +sha hid +ma v +d honi +cin i +do j +hin dus +part ying +dal i +alon so +inform al +clark son +it ton +ki an +cit yo +mor i +la sted +as pen +libr ary +susp ici +qu at +den ial +fol der +ch ori +swee ping +eni x +ðŁį Ĥ +Ø Ń +nas car +handmade hour +mou l +heat wave +em er +exam ine +ib n +gr ind +po v +tion ist +m bo +she ila +integr ate +om es +take away +cer v +con nie +tic ket +ce led +bi en +visu ally +madagas car +sor ry +gu i +park run +tra its +la be +pois oning +ॠĢ +vi able +bohemi an +denti stry +bad os +spr outs +mask ed +te ddy +ðŁĺ · +sa f +sa as +ji ang +ti ght +spe aker +withdra wal +bc n +as signed +class rooms +fle ming +ðŁĴ « +super girl +tot als +table top +e books +horizon tal +cra z +flu sh +j ard +c dc +er son +ãħ ł +green wood +ni h +co x +ad a +lit re +go ing +v icky +cur ved +lou ie +gra ins +hy e +lon ge +reme dy +tra inee +san jay +super stars +ma ser +man u +s age +wh l +ðŁĺĤ ðŁĺŃ 
+ðŁijį ðŁı» +m sd +en z +rab hu +j oo +gh u +ac er +e po +resurrec tion +justice for +bl ended +mo da +avalan che +france sco +re spective +g s +ye ast +wel ch +devo tion +ge tin +athe ism +am ic +carol yn +lo c +ld nont +ave c +us da +le gged +bra very +b lower +cow boy +he h +sti ble +buff al +chann el +run chat +âĺķ ï¸ı +ide ology +best seller +y oo +pe anu +bon ne +fel ic +edi son +fr actu +naren dra +pp ets +seym our +ri viera +he ctor +necess arily +bi anca +soci eties +the best +w g +sent ences +win k +vacc ines +pal ooza +jam ming +as f +mp us +agre ements +ec k +ba c +hon ore +com pul +wild cat +im posed +yo ga +hud son +can celed +l ich +fu zzy +es que +ch uk +w vu +se k +fli pping +r hon +wi shed +wh a +cap ability +len ovo +ìĨĮëħ Ħëĭ +vi vo +tv d +nor a +sil k +pas adena +yo semite +valu ation +clo cks +u ber +mr c +dar kest +au bre +ss o +bell y +wrest lers +kill in +lou der +buck ley +ge el +ad on +un s +appe aling +ðŁij ¯ +semit ism +list ens +fit z +ãĥ³ ãĥ +ny lon +ar ty +seem ingly +hal a +su ited +et y +she ds +mu ffins +ap ric +um ents +u ta +jam mu +chelse afc +star z +yo ko +roo t +clean sing +di ar +pione ering +ihear tradio +dig iti +fin dyour +can o +ðŁĴ İ +z ol +spac ecraft +six ers +moi sturi +b ile +ti sts +hor ton +rang ing +colum bi +mete oro +senti ment +ep l +foo th +text book +drain age +r ly +sc ue +imran khan +ðŁĴ ¸ +margar ita +ed dy +predic ts +gamer gate +advis e +growth hacking +love you +ug and +v f +beng hazi +s later +ne wor +ch el +independence day +p np +cul len +hoo dies +num bered +brit t +t sa +kl tu +s ages +mom o +onep lus +col l +gu ts +w ta +mesm eri +enh ancing +chiro prac +j is +teen agers +m one +constell ation +sweep stakes +e ze +slovak ia +la ye +pear ce +wa ver +po gba +k ron +sur geons +mar x +ti d +gg a +desc end +p ours +upri sing +wal la +sab bath +bachel ore +mack in +k am +peter borough +hor a +ðŁĮŁ ðŁĮŁ +think big +r j +hy drau +sp al +univers it +ðŁı ī +mail online +league of +ten ants +w ally +lan ce +heav ens +dd r +bol ts +am ir +i phone +ci gar +en du +re i +el abor +r inging +john son +characteri stics +sal oon +algori thms +tal kin +m tn +di ve +region als +ff ice +hat i +deviant art +so tto +shir o +l ama +k we +f aded +por ting +tu mmy +est ates +buen os +ðŁ¦ ģ +beli ever +pen etr +dar n +sp ite +can opy +fashi oni +t illa +pet als +eli jah +bra wl +marty r +ë°©íĥĦ ìĨĮëħĦëĭ +mid town +eric h +d apper +sm town +me gam +ww w +le le +on s +cat fish +fir th +fossil friday +ball park +th aw +pot ent +illi e +cre ep +car p +so ap +gun dam +infe c +yy yyy +ठ¨ +z ag +rit t +calcu lator +bo ca +ok o +to ad +threat en +refin ed +olym pic +accompli shment +bacter ial +a ji +tat um +feli z +she ed +j at +th ic +jam al +ðĿ ĺ +lin a +ðŁIJ ¯ +jo king +yot po +pin ch +ak ron +her b +motiv ation +li a +ho stage +cre ek +gam ble +russ ell +patt i +fo tos +c pc +bro ken +back the +cla ys +u mm +stock ton +mat ernal +ü r +la kel +cent ury +be k +infe cted +ภ¡ +smack down +man ned +ta hoe +sm es +bas a +su la +augu sta +. 
* +rohing ya +gre ed +counsel or +silhou ette +gra vit +cla use +' - +bo bc +occa sions +now adays +dic tat +be ard +n ally +brigh test +kab ul +inc india +dhan ush +archae ological +che ape +mizz ou +d hi +ov ski +bax ter +asse mble +à ¢ +gi gi +ac am +wis ely +haz ard +north ampton +âľĪ ï¸ı +me th +bla sting +re unite +mu lus +ali zes +t read +mil a +ed ward +ko va +pe sto +ðŁij ¶ +vit z +hydrau lic +refurbi shed +mo tel +isab ella +hom me +sever ance +uph ol +mis erable +f ari +lat ter +ef er +crack ers +es l +ac io +yy j +in an +ec b +z ind +pan as +tru cking +re ed +sh aker +burge ss +em pire +ag nes +n ington +art works +fr s +ti le +bi ome +eu n +ch ong +americ ana +god father +go blin +i shi +! ). +temp ted +gen omics +mand ate +ck y +ðŁĴĻ ðŁĴĽ +som ali +br andy +in ven +spoke sperson +pc b +yu an +h g +fa z +starwar s +ro wan +blue grass +don g +d day +trin idad +er ton +ban ning +re tention +cu red +tober fest +re set +we is +deta ched +behindthe scenes +immun ity +ph a +bra y +ðŁij ½ +ran cho +ram say +est onia +nd tv +] . +cab aret +tar o +d v +show cases +plu m +ðŁij ¸ +son oma +pre pa +memor ab +e stu +drive way +u les +magn us +x r +nn n +much as +en ge +stre amed +fore stry +audio book +tro y +reck less +kil om +ru ler +ra k +proce ssion +i ons +po ole +noc tur +wh s +farm house +per a +par me +hypocri sy +s ics +v ant +cas k +holi stic +au st +Ð ¿ +in do +ðŁij© âĢį +di so +disp atch +ol sen +make it +en nis +cent re +ar range +ðŁĮ ¼ +sal ted +ea siest +f ate +reg atta +mo zz +ac an +sin i +g ically +ch ops +chick en +work in +ha gg +invol ve +wee ds +book day +wake up +ky r +michel in +fu ss +re juven +vac ancies +incar cer +m st +sc ents +sovere ign +kick er +à § +bo d +âĢĶ > +sa h +mob il +shrop shire +oph one +dress er +mis suni +hep burn +i mo +foli age +diagno stic +as san +cycl ing +guil t +c sa +puertor ico +win elover +wake field +do ggy +k he +pa pp +co g +al lot +cu ck +poe tic +mi o +re vit +mag ician +ç ¥ +ant enna +west wood +mber g +lux e +oat meal +Ø ¬ +te at +ffe e +sear ches +l ly +plu to +el on +let tering +inno cence +fa i +ann on +telang ana +ma it +neu ral +can ni +ar oma +a stor +fe x +co cac +mon etary +f ent +un sure +' @ +indi rec +teh ran +isol ation +li bs +make up +merce des +ff y +he tero +de o +sco m +cur sed +veteran sday +franken stein +shre ws +de co +ge ese +lefto ver +ha did +vari able +acade mics +carol in +under going +vari ation +na h +ssi er +gamer sunite +pur suing +emer ged +ll ers +control ling +ro aring +mete or +vol t +daw gs +be aver +is life +bathro oms +aci onal +pre vent +lake district +in als +y ani +gra bbing +sac ks +le z +sw ay +k ool +time s +klo pp +la de +con cord +resul ted +revi ve +recon ciliation +ol and +az z +gir o +mand arin +de en +nutriti onal +is coming +van i +aw www +der ived +love your +stop the +shou ting +nov ak +ðŁĻĮ ðŁı¾ +lo af +displa ying +sunday with +ma guire +ch eri +ðŁı Ł +re match +qu ic +Ú © +y in +ðŁĺ ¹ +ili ve +z ip +our ke +down loads +sw at +missi ss +care rs +t ment +proper ty +hahahaha haha +gi bbs +sur rey +ar ise +tic ism +sti a +ir ling +fro g +co se +bas sist +fore ig +lea u +pil lows +hol la +eli e +disclo sure +peanu ts +inte ch +ww c +plun ge +trium ph +cor i +sli ppers +ðŁĻı ðŁĻı +neutr ality +ma re +hair y +gang ster +hu mming +cust ard +mer lin +ale a +s by +dam p +mo han +ver bal +j st +gu tted +b jor +un finished +ðŁĩ¯ðŁĩ µ +un happy +âļ« ï¸ı +by pass +at su +fis cher +sa v +afric ans +re use +mid way +demo lished +ger rard +her cules +Ä Ł +medic ines +cl icking +sur round +jo 
ong +wav ing +tri bes +wet lands +offici el +argu ing +l le +do va +su zy +club house +ne gro +ob tain +ga o +gl ance +assi st +ch os +ãĤ ¢ +âĺ ķ +adri d +occur s +st ans +par don +livel i +emplo yed +re visit +ff xiv +bb le +ne aring +min er +ðŁĺ ¹ +giov anni +up to +mar vell +mar se +to wels +cb n +engine ered +y elling +spart an +si ans +ðŁĻĮ ðŁı¼ +se v +coyo te +sta di +t cm +app en +shenan igans +open access +so aked +ma squ +le vine +stro kes +l k +aparthe id +hipho p +char don +may may +ha asan +stri pped +fr o +scri ption +f ton +h f +pri sons +marsh al +ķ ãĤ +an cho +com promise +classi fication +buzz feed +bblo ggers +deser ving +) / +s way +ob o +camp ers +poder nfamily +p oured +bri e +squir rels +se ize +: # +le k +ti mb +st acy +nas daq +repe atedly +br at +mi ghty +competit or +mah one +de si +o ke +bm w +shi e +f cb +cheape st +minim alist +par amount +n ate +har as +insan ity +lat eral +ment ality +mo zam +ta pped +yad av +u sp +b way +the od +bil t +ra ids +em press +adap ted +pat ron +nut shell +ag ra +be aded +sundaywith marsha +vi king +proce ed +main tained +thinkbig sundaywithmarsha +sn es +mus ica +to wer +ch ab +bo k +sm t +insul t +harve sting +windo w +ru ther +be ige +dec al +indic ate +ma iling +ri ft +po le +ander son +ch oral +sp ride +l ili +ev elyn +imrankhan pti +.... " +ke red +un dp +water falls +se ars +le mans +world series +ri el +ani e +app ar +score rs +lam p +a than +phys icians +qu inoa +refu sing +vu itton +unle ash +s la +pat i +shou ts +inten tions +fo amed +europe an +neighbor hoods +me er +man son +du h +br at +con es +bow l +kazakh stan +ठ¿ +in appropriate +del hi +ketch up +ful ton +s ys +consul t +gar field +to go +f ml +f led +b ds +facilit ate +ree bok +selfi e +elev ate +activ ate +bi ble +ca wx +b ys +cam ille +sy ou +sk ool +her t +w bc +ple dges +recor der +po sh +ac re +so aking +mat il +v sco +shoot ings +pla r +e con +ðŁĻĮ ðŁı» +rashi d +u bi +ðŁ¤ ¤ +sw inging +wi pe +rap tor +m su +music video +dur ham +at tic +apar ty +fe tus +activ ation +aa z +motiv ate +ðŁĴķ ðŁĴķðŁĴķ +j al +ठ® +ag on +sche er +stal ker +fo ster +az zo +tele gram +vi gor +s laugh +screen shots +entrepre neu +kri stin +inten tion +ch illi +fr action +don a +ge a +tc u +s ite +la k +em il +d nt +bor o +wil kinson +re cu +ato day +t anya +bl anco +cd n +brilli antly +g cc +ac c +evacu ated +ther ine +den ny +cait lin +she pard +pou ch +hand held +sou theastern +ha a +à ´ +re solutions +led ger +sr in +r ar +shat tered +chim ney +im with +mete or +hand led +ra ke +town send +en han +shi py +duc t +tw x +inflam matory +war hammer +theat rical +gro s +sk ar +sco tty +ni el +tit o +tin i +conne ction +_ . 
+goldeng lobes +sha q +ðŁı ³ï¸ı +hall way +fron ts +effec tiveness +gla ston +d hs +ex pi +to h +c pl +sc s +re o +ha g +resemb lance +hor an +abu sive +qu er +virtu e +cho lester +a q +shan e +m ce +carri ers +di stress +re wind + ¡ +voo doo +int act +ann o +ðŁĺ ¤ +pi led +adi a +ãĥ ³ +en ow +di gs +light ly +goo fy +turb ine +governor s +con te +re open +pa h +i ve +cra fting +swee ps +jo di +an de +zu cker +kaw aii +o ko +v ai +out line +kri sti +ts n +insp o +qu int +fil thy +lyn ne +listen ers +depar ting +or d +t weed +, & +ale k +sel fish +nor ther +recogni zes +i ps +be s +a ed +w ills +pe at +surround ings +mon uments +ais le +be cker +la v +quant ity +v ah +helicop ters +tu cked +alv arez +sha pe +o bey +ad diti +road side +m ite +bl ers +ep age +j au +ignor ant +b ins +lu lu +x o +c fo +ee eee +apprentice ship +shef fiel +to i +ho k +faken ews +deplo y +aid an +husk ers +ãĢ İ +west brook +mi ster +confi gur +car r +fic a +proceed ings +ha w +ste ak +mur derer +pay day +a jo +p vc +don ates +bi af +nom nom +be it +k ali +x rp +ahmed abad +se mic +che y +x tra +an twer +head lining +squ ares +roun ded +flu ore +bol d +disa sters +am oo +gener ic +cran es +brief ly +gi g +auster ity +anticip ation +for ti +treas urer +cann y +ce cil +dete cted +check list +ภ§ +pam ela +bar bados +an field +hear ty +tx lege +peren ni +arro g +ing ram +âĹ ı +ty ne +spo on +r ation +am ba +m be +cam el +h hs +york shire +reflec tive +fre aks +to k +ju do +partic les +du bs +ban jo +accred itation +prover bs +over dose +inte gral +gu ang +mc s +super car +af b +al vin +ail s +x tre +st aging +tw ent +rabb its +mar o +inste m +dol l +cr ay +sant ana +ble ach +mini ons +che ap +man t +di vers +catal onia +lo is +mat ri +cou gar +kay ak +e gre +p so +a ia +å ® +char lton +tr acked +sc ari +pe tt +f wd +x in +gra vel +br ic +bigg boss +ar den +hu gging +pal ms +st v +li mb +the movie +handic ap +ri me +z ai +stu b +indi a +lithu ania +rhy th +p ita +maced onia +high ered +brid get +schwar z +ske let +hi kes +ant arctic +c ps +mash up +Ð ° +n ell +chand ra +he ir +an us +sher idan +mi mi +muse u +bec ca +an ir +bar rie +dioce se +compar able +ðŁı³ï¸ı âĢį +yuk on +me p +hor mon +mer ic +al f +con quered +christ church +ðŁĴĻ ðŁĴĻ +hazard ous +poo h +cont ing +retro spective +par ame +na ir +con sor +ho tra +astoni shing +cater pillar +u man +ti sm +t vs +serv ic +croy don +mor ales +c g +cu m +te ur +scan ada +s all +magno lia +el ise +th our +à® ¿ +ag omez +phel ps +ë°©íĥĦìĨĮëħĦëĭ ¨ +wh os +weav ing +si sd +pro poses +cro ws +pre sale +econom ies +bernar do +sha hid +air show +mc cann +hor ticul +nr l +du el +mongo lia +tou lou +requi rement +struc tured +ed i +o lives +he a +cu ter +Ð º +enthusi ast +harri et +domin ion +sub mer +ðŁį ĥ +sa ab +nes burg +mo ff +def ended +bur t +rewar ded +gold man +op tics +khali d +house holds +buc kets +ce cil +che ss +substan tial +ef l +oper ation +evalu ate +st n +rece ssion +l ll +tom as +tru ths +ak bar +s words +p act +embarra ss +ha o +ay urve +scrip ture +ny cc +op t +di ameter +sc ented +organi zers +re lat +ha e +dream ers +de se +ðŁĮ » +restric ted +n ale +r hp +dol an +mun ster +ha ired +consult ants +jo ints +hu mil +d ill +relent less +t é +af il +ut ilities +japan ese +condem n +pet ite +colli de +q f +peach es +cou rier +l ore +âĺİ ï¸ı +reli ability +ch uk +ðŁĻ ĥ +stu res +ge ther +ho stel +bi er +- _- +â ĩ +e ze +ta ilo +di ent +blu ff +chu ffed +pil ip +mon arch +e em +bu chan +b ick +op au +ku ps +ภ¢ +pist ons +sp ins +m and +ce st +bur ne +v ile +cher 
ries +bec kett +need les +pan ch +ë Ĥ +haha h +trou bles +insi sts +do you +g mc +mor tar +deleg ate +in n +g anda +sin atra +ठ¤ +spee ding +pu pil +pre mises +ali gnment +pi kach +as us +j alan +Ø µ +lime stone +fol kl +parme san +ce il +mo y +shawn mendes +ac up +hu st +ot es +med ina +ma di +gta v +censor ship +ar g +swe eney +sy kes +col o +foot steps +cann ed +adv ance +gta online +healthy living +ðŁį ¾ +a ig +p ality +oc s +he brew +im minent +berk shire +jeremi ah +out going +bak er +entr ata +ma ids +gro ves +bo c +a del +m fw +con science +arm ys +nut ella +conte stalert +novel ist +la h +ban ker +marque z +ðŁı ¡ +to ff +out age +gr p +ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃ +musc le +du dley +nvi dia +mi di +m uni +ess ays +dat ac +car ter +ภ£ +t ans +i ves +public ations +al er +ok wx +il u +cu tt +har p +out law +luther an +br ill +bo lic +do well +green land +be sties +path i +pay ton +gue st +har den +ðŁ¤ © +ann ed +evacu ation +po ised +mc der +b han +o i +envel ope +ci d +ca vi +ta pas +book review +grey hound +âĻ ª +fe ud +lun gs +for te +rai der +ff er +oni x +dep end +yn wa +rel ating +de vs +ðŁĴ IJ +acqui res +d ha +j yo +priv ati +can ine +k b +cra b +sar din +imag ining +k j +em por +down hill +ne z +ta eyeon +nick imin +gb p +à µ +w ap +sec co +ma shed +ðŁĴ¥ ðŁĴ¥ +augu stine +diss ol +dic tator +â ĵ +vi per +ed fringe +vau x +hard work +book let +no x +chi ff +ðŁĴ ¨ +observ ations +xbox one +u sher +ke er +lu p +dal las +cal gary +ma dra +di ous +k bs +wood ward +hero ine +lu mber +sea world +o ws +mc ke +maver ick +gu la +cross roads +fan g +s ade +nik ol +chee tah +me c +pp g +er ick +ðŁİ µ +tox ic +bj j +viol a +sp ire +ch ino +tra vis +institu tional +ha as +low ry +w ac +ea e +hu mid +mp ton +ru ck +je w +c ine +zim mer +se f +bhar at +fre es +aam ir +ðŁĴ ħ +z inc +wan e +multi player +royal wedding +e el +preci pit +qu ery +kimber ly +isa bel +ful fill +ig an +vau l +pan e +sc y +dig it +gun n +u tah +dog day +fi on +xia omi +da c +el ast +cha vez +ro blo +g ine +ten th +ab h +ke to +hur dle +na dia +memorab ilia +ha bs +qu an +h w +hv ac +pix ar +ec cle +kram er +accu ses +ðŁĴļ ðŁĴļ +per se +mean time +wa hl +atle tico +âĢ¢âĢ¢ âĢ¢âĢ¢ +ott oman +no vo +k us +conne cted +tru sts +d mv +spen cer +rahu lg +do ve +sto kes +bolog na +enthusi asts +à ª +rockstar games +ted cruz +du ras +s acked +late x +immer sive +cer t +lu cin +princi pals +fa res +sa ils +far n +am ent +saf fron +quent in +check point +fer ris +ex cur +ðŁijī ðŁı¼ +bai ley +se h +ter re +mad am +s band +wan derers +cumber batch +yy c +digit ally +blackandwhite photography +roll in +moroc can +ðŁĮ ħ +din ner +d well +to om +m ye +ez ra +cp fc +war hol +me er +jon ah +no aa +s gate +so on +secu lar +g ating +ti o +dri ver +si ssy +assan ge +ta th +ed mund +bobc ats +ra ji +po stage +stu ds +m gm +kat o +edin burgh +meet the +shir t +fa a +mens fashion +sp reads +wi m +car ts +phoe be +j ars +bot swana +Ù Ĥ +ed war +sk ar +ri ve +gu sty +c tv +ferdin and +su therland +nickimin aj +k v +si us +bee ch +re z +desi res +on ial +camp o +quar ry +lor raine +gil more +ig gy +µ ï¸ı +ho pping +avi z +ðŁĮ º +uni sex +dedic ate +att itudes +ste er +jun kie +rail way +y b +whi sper +key an +k us +ju g +di x +a ins +sum mon +ov ich +sy ed +her ald +ma ison +me ded +wild flower +main land +ri sky +ru kh +over looked +ki c +destro ys +nam an +ki p +z ano +champion sleague +ban dit +quin cy +smi le +cal vin +open ings +ta pp +ol ulu +spec tro +accred ited +ap k +pra ised +bar nett +pol len +premi ered +selen agomez +tou red +screen ings 
+uu u +mis o +en se +adam lambert +guel ph +har yana +hu tto +le ar +l tc +po ached +brex it +æ Ŀ +tt c +pa vement +mon gers +ro e +ad ers +ling ton +particip ant +ca red +ga il +y ates +lan tic +dash board +jo o +feli pe +ssi onist +bu m +s end +a eri +thu gs +luci fer +a he +dete ctor +fil ly +gas oline +ham per +hump day +the ta +the band +fore casts +o hhh +lo bb +hol l +cp u +az u +ad ar +hai ley +bu b +car t +quo ted +an archy +pan cre +twit art +al den +st ash +the less +or ni +belie bers +mor mon +partic le +avi ation +⬠Ĩ +webcam toy +sad dened +cru is +ham let +n ct +roll ins +marque e +saw yer +reli ance +a ura +di ec +soo thing +sig nings +ak is +à ³ +at kins +aer op +ðŁĮ ¿ +y ab +sh ari +con nol +du bbed +manufac ture +convin cing +feelthe bern +ra u +pu lit +on ec +gem stone +ur ging +bag u +ga h +aci ds +fi anc +zodi ac +sn oop +her rera +initi ated +ven ge +profess ors +pro di +stron ger +e mission +bb a +hal le +ta pp +haw an +wh im +compe ted +myr tle +ir port +cold play +ach e +ske p +m son +ss ic +calli graphy +swim mers +me y +pp c +thri ft +po c +re places +commu ter +âģ¦ âģ¦@ +go ers +lo gue +para dig +bas kets +sensiti vity +joh an +atl antis +& & +suit case +anxi ous +l h +str i +gal loway +stre ad +war den +gr ounded +ffici ency +li feat +reli c +disgu ise +island ers +f cofficial +classical music +b mc +en field +bi que +oak ley +bat man +sla ying +ner ves +mul tit +calci um +projec tor +scott sdale +ant ino +gri ps +kim mel +des mond +prote stors +hi atus +metaboli sm +conclu ded +press er +ti pping +sli de +e to +hun ting +aus open +ri k +pp ery +innov ators +pitch ers +ag ger +fun gi +z ad +proli fic +rockn roll +bl ames +ct ar +stam ford +q ad +mozz arella +insan ely +den ver +ph ouse +nom ad +ï ¿ +s ris +pro du +hen ley +pag an +am trak +ru bi +in cl +tu tor +sco tia +wo es +sing apo +fun nel +turn bull +know ledge +gri mm +real madrid +we are +missi les +con sol +emo jis +sne ak +smi ths +ru iz +br ou +i el +ha ver +ðŁĮ ļ +kin gof +basil ica +circul ation +prin ters +ta pping +ri dley +dra gged +ha j +writ er +fundament als +personal ities +me tre +stereo types +bur le +best of +n ffc +ha th +mini stries +a ali +trac ing +pav ed +ł ï¸ı +g ic +insp ire +tu g +ha re +repe ated +ex pon +lol li +rho de +pre cin +install ations +instag ram +az ar +i es +sole ly +du kes +mission ary +van guard +fursuit friday +on d +pol ari +ma st +har an +jos é +jack ed +ec oun +al ities +ne ph +ra vel +moder ated +sco w +s fb +uru guay +as o +ni g +au du +p ints +lat ina +ben z +m itting +char ted +mat ology +cit ro +biop ic +ðŁij Ń +djo kovic +fox y +agu il +so to +an ada +sin king +sc rap +hair s +bethan y +fact friday +ðŁIJ IJ +unlea shed +) ( +contra dic +ram on +coast line +y ong +sn sd +li gan +p ome +mit age +ge tt +wat i +ri sk +so aring +bru sh +f pl +av an +å Ĩ +lar son +sh ear +mul til +blu r +multi media +chun ky +par i +n ani +weir d +cholester ol +char les +dream ed +tan ning +puzz les +fr am +hand ball +ch ag +beli ze +al u +bang s +Ñ Ħ +detec tives +mc g +ish q +bo thered +saf c +mp ing +ten eri +g ays +sail or +an gi +mul ticul +gue ssed +ros é +high ways +bro om +chatt anoo +- ' +see ker +on ed +at f +lu c +> < +bar i +per cep +jewel ry +as ph +sor row +sl ing +mam moth +jac kie +ë § +wilt shire +sa o +can cell +im paired +tor ial +bre ed +guy en +jud ice +tit le +pro spective +applic ants +ðŁį Ĭ +epis cop +e id +b yo +stock ings +ðŁĴĥ ðŁĴĥ +ll p +sna g +keep it +l ough +ol son +matur ity +!! !" 
+cop ter +i sha +bl i +wil mington +tr youts +th ai +ðŁ¥ ³ +pe bble +kra ft +f p + º +ssi vely +li vin +contest ants +tex tures +jo an +h dr +film festival +prov ence +wi do +op end +c si +sto wn +cro ati +ad just +host ile +analy sts +il an +cu ppa +bru m +newfound land +good win +me tt +mall orca +plu gs +bu k +bb hutto +wrest le +sa ire +sho pped +for za +le head +vi vo +ba st +ro xy +reg is +hard working +hon olulu +desp air +young sters +ni g +impro mp +roll tide +de emed +tre ason +ru shed +for ged +ff f +pikach u +bri ggs +do it +ac cent +la us +gla ze +compet ent +a ho +photo g +mid field +le go +har vard +min orities +re illy +slic ed +once upon +initi ally +financi ally +landscape photography +har dro +qu o +mm ers +par kinson +smu gg +read iness +bru tally +glou cester +mp ed +bbhutto zardari +mur der +ye d +dat aviz +sr t +dow ning +bi ans +m ü +fle ck +fli pped +s ly +brilli ance +ri m +k um +bubb a +ko i +knit ted +sor g +ma is +ðŁĮ ² +ti ss +su stain +sen su +ak han +zi est +exam ines +chardon nay +user name +short list +re bs +on o +dar ing +hard wood +che que +righte ous +light ening +dir k +shra dd +du ra +down stairs +sh al +ami gos +ru ff +s law +ri es +red nation +man us +ðŁĩ§ ðŁĩ· +distin ction +u bun +dur an +mi gra +thi ans +la ver +domest ic +k x +jaz zy +justi fy +belong ing +insul ation +color stv +drun ken +chann eling +qu and +xi ii +enligh ten +kan o +fati ma +teen choice +terri fied +p ba +as ley +met museum +dun e +pack er +ki o +ðŁĴľ ðŁĴľ +bo iler +fas cism +ar mored +back grounds +in mates +embarra ssed +defin es +th d +we go +silic one +lo on +el ding +bor rowed +he mp +ak sh +kaw asaki +br y +de af +kill er +dispo sal +ðŁĩ ° +glaston bury +un covered +o xide +po ff +d ant +k j +ku ro +dri zzle +peop les +fe e +pro pri +dd lovato +pi ggy +ot is +aller gies +u bis +pengu in +ser a +vi z +prosp erous +ici des +tornad oes +sene gal +web cast +sto red +enchan ted +bb cone +bay area +entrepreneu rial +rednation rising +experim enting +ang an +lot to +they re +por e +er p +seren e +east wood +bro kers +bar ge +stal lion +timber lake +tailo red +dy stop +b ate +lat ors +di xit +bran son +dynam o +ky lie +shame ful +bt wn +spring time +mix ture +s ounded +lu ton +dad es +mal a +op ra +en ic +rahulg andhi +se wer +~~ ~~ +ky u +nor theastern +ca er +bc u +nir vana +kitch ens +ous y +al m +river dale +hid den +fl int +sp d +pat rons +katy perry +au gh +exhib itions +sm c +shu ts +at ore +da in +some thing +ber th +bo g +por ter +gen to +con cussion +ang lic +ro we +gr illing +scar lett +master ing +mor nin +comm ented +si me +si zing +christ y +ce os +st m +at ry +tari ffs +vac ation +pre judice +p su +paren tal +far age +can a +cap com +koso vo +you re +men stru +stal in +grape fruit +br an +che sa +dav en +exc el +!! 
) +๠Į +distribu tor +ce a +bride sma +millenni al +wa in +ob serving +mis ery +plan etary +expo sing +bra ised +comp ton +don gha +q l +spring steen +th ul +syl ve +cab o +pal ad +niel sen +gaz ing +ba ja +r oud +orchi ds +johan nesburg +se man +d ji +oper ative +affe ction +eclec tic +at c +mut ant +aw x +nic e +mel bourne +indu lg +tu lip +dias pora +wel p +big gie +mississ auga +retri ever +or an +tam my +c ta +hipp o +seas oned +ger mans +eng v +marvell ous +im f +rela ys +mon tan +maur iti +me ister +as surance +reig ning +su fficient +han e +no thing +pos se +nav y +in love +brigh ton +en qu +ch ung +sweat y +es c +cal ed +man s +nicar agua +sl ices +mo cha +washington post +bb n +dam ned +grow ing +en burg +lo an +me s +wh oops +believ ers +spi el +vo daf +l at +s led +cricke ter +brown e +golf ers +bar ra +wat chers +lu igi +sw amy +mom s +pit ched +san tor +cr s +si re +sc amp +bo de +ste war +jon ny +ent ity +pac qui +mind ful +min india +bear ded +temp t +scorpi on +eat on +authori zed +ar to +s vp +op athy +cch ini +house music +disney world +âĢĶ @ +pro pose +di y +expen se +ten g +pupp ets +sm el +d aca +per ry +fin n +boo sting +lefto vers +cou gs +satell ites +man y +az e +g ong +fi e +metho do +fer ries +ðŁ¤Ķ ðŁ¤Ķ +explore rs +load er +attrac ted +il ton +godd amn +pi azza +doc tr +sav ing +paragra ph +visu alization +may ors +work flow +ack les +ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ +ठ¸ +twer k +clu t +lo ver +te ases +si an +o te +deter ior +accor d +l fw +swar ovski +nat al +tra ps +k ina +analy ze +laye red +bever ages +un it +ran som +pe shaw +dest ined +astro logy +si pping +miley cyrus +cam ino +marshmal low +bli ss +out back +fa q +int oler +humil ity +po ppin +hallo ween +mon tene +op hy +nu n +tattoo ed +a as +ðŁĮ ³ +dale y +qual ity +du sa +fisher men +swi f +ter rac +st au +le in +trol ling +ship ment +garden er +march madness +head band +gr t +bur nett +w and +!!!! !!!!! 
+gh e +du x +hu d +war ner +ðŁĩ ¦ +ex ile +rescu e +rat a +d han +duc ati +dro wn +bl ends +spi e +alli gator +simul taneously +broo ke +u ke +k har +comm union +ri ka +ford fc +chin atown +you rown +me y +can al +syste matic +de pri +ox ford +an il +w ut +equ ation +be z +fle ur +the good +lang ley +ad ity +ed ith +al fie +о ÑĤ +en cry +br ill +ex emp +ce sar +mb ling +ab ri +sc icom +j ing +school ing +mi ka +mechan isms +impromp tu +rhe a +moo re +crime a +be sto +wri ght +el ders +ro ds +kam al +folkl ore +be et +mini on +reli eve +thr o +team usa +pas cal +made with +boli via +itt i +free bies +desi red +best selling +l iness +la den +ke ane +mi sts +hipp ie +atta chment +@ / +se w +flan agan +âĿĹ ï¸ı +supre mac +stl cards +si as +q u +rh ys +ste ep +val leys +v w +pav ing +disp at +al ison +por te +id u +new sc +soc ket +mo s +co star +re vo +prote ins +stanley cup +m cal +ear ring +se cs +mc lean +cap ric +nick elo +ad en +v c +shou se +adap tive +maxi mize +entertain er +pro se +gri ffi +six teen +lam ar +mi rage +saudi arabia +awe ather +ru st +in filtr +fashion week +ðŁĺĬðŁĺĬ ðŁĺĬ +selec tive +bubb le +a den +fen nel +deci sive +m ta +mock ing +mb les +st amp +mu le +bernar do +gr in +po tt +j ingle +vet tel +colom bian +cam o +motivation monday +ba han +p ly +dh ary +k ami +x men +sleep er +gar a +my sti +confi dential +conflic ts +p neu +ce s +insur tech +clean se +me rely +va is +tu x +the great +shar on +ma j +hol a +eco systems +aj ay +aa j +hu sh +har mon +backto school +wiki leaks +reflec ted +ðŁĺ ĵ +commemor ating +ac et +buck ingham +messi ah +tu ous +hor net +to be +d q +he ine +mi g +pl ate +nichol son +sp ie +cumber land +nor mal +pho bia +happy halloween +city fc +mc el +gilli an +ke to +lu de +de mise +su ga +str ate +mcgr ath +visit scotland +foo led +cb r +gc se +col ori +po td +missuni verse +fin ances +ma poli +for ks +Ø ´ +cann on +medic inal +ðŁĹ ĵ +kh o +wre ck +pan to +bag el +gu ll +syndic ate +ic y +pr c +ki en +zi ka +ti sh +pe ta +c co +li za +ch ut +ex traction +el g +gl i +fu eled +pos it +respec tively +leice ster +br ink +vulner ability +im ported +e sha +ðŁ¦ ħ +r ural +re ll +gam ing +atlan tic +aband on +no ah +re solved +pro state +aller gic +ps d +âĺ ¹ +dun geon +fang irl +illumin ated +m hs +white sox +d ently +ck o +endor se +over ly +dazz ling +prior iti +night life +ut il +be have +flam en +east bound +ðŁĴ Ł +ilove you +gov uk +mozam bique +alle gi +dr i +testim onial +ath s +ì§ Ģ +mm y +shab by +pro secco +friend ships +cal am +dam ages +off set +jura ssic +jun o +arre ll +ðŁĴ © +interven tions +dare devil +car ver +run away +ran e +truste es +ha ute +dep ths +ðŁİ Ń +me in +sacrific es +con cier +ne sting +i zzy +me tam +ilove my +ur ine +du lu +mal hotra +ve ins +night ly +co at +an di +he witt +lon el +ci ble +wr ite +jen nie +sant ac +ĸ ï¸ı +str ato +singapo re +sop rano +kri sten +cheer ful +flee twood +fa iri +m eli +wa st +tur nt +sfor sale +sc rolling +angel ina +ren dition +jeric ho +nick y +or b +fla vo +patri ot +ash eville +sick ness +re fund +aggre ssion +b pl +ãĥ ĥ +elu sive +thi story +hang er +bu ffs +vil las +at kinson +sp h +ja it +decl ined +wo k +supre macy +oo tball +ey ang +ðŁİ ĵ +s ford +ath i +consu me +road ster +e so +u pro +reci pe +au f +uc i +ar on +oo oh +cs go +re ich +mc d +min ute +ladi es +pun k +rut gers +mee k +ariz on +ta j +land lord +de gra +autu mn +lyn x +us f +b hi +fairy tale +dongha e +bet sy +explo ded +chen nai +op a +pro tag +br ant +ðŁĵ °: +g f +pal li +ðŁı¼ âĢįâĻĢï¸ı +su t +ill ini +colum nist 
+shir tless +de centr +sear ched +ec or +bu ggy +s ack +ðŁĺĤ ðŁĺŃ +de t +ther i +or naments +bring back +to v +quarter finals +ic he +con stra +gi er +buchan an +vi x +kay aking +mu stread +swal low +mel b +sc af +op al +may oral +har at +ðŁ¦ ĭ +schedu les +id f +ha gue +ro z +a ah +d mc +du plic +ca che +orph an +frac ture +rec on +ch av +bun nies +al ain +mustaf a +ðŁİ Ļ +vac ations +dynam ite +tex ted +broad caster +ðŁĴ £ +ste amed +rock er +di etary +luxury travel +inaugur ated +sa wards +vaugh n +lincoln shire +click ed +kra ja +f anc +remo ves +layo ffs +mc far +bre eds +win nie +jon ghyun +incen tive +vari ations +pat ton +atur day +persist ent +pr un +pi ers +dal es +æ ĸ +breast feeding +r ance +ta wa +Ĥ âĸ +mur doch +cap tive +thi stle +nic a +commod ity +cou ldnt +board walk +graci ous +practiti oners +n gc +scru m +ner o +camoufla ge +col on +he i +phys icist +saturday morning +ten er +si won +colum ns +bru ne +y vr +ba ir +reti res +hal am +cab er +shaz am +min u +cas cade +milk shake +gri d +d ren +vin cent +so dium +plat ter +cheer leader +chen ko +y ak +elimin ated +ty po +y man +re think +âĿ Ĺ +ts ville +bernardo kath +ex tr +ðŁĺģ ðŁĺģðŁĺģ +ta o +re per +mo ths +em powered +c iting +transpor ted +mon ks +san at +cle ars +bachelore tte +camp bell +racha el +har le +hand ler +climb s +inter ference +rele ase +sh and +r bs +hr h +ãģ ª +val le +r é +sli me +w akes +chu bby +slo an +el ves +ath en +attor neys +micro scope +ston er +sc aling +o be +c out +se man +mid week +bal sam +ðŁĺį âĿ¤ +ti ful +v ish +lo tta +ri pping +re mn +ti re +le ap +ha vent +la by +hi mach +whisp ers +we in +ðŁİ ¸ +wild flowers +se le +u cc +li ability +az ine +sw ings +k ya +ta ir +re main +e do +flo ps +poc ket +grand ad +exam iner +gr is +ffe ct +ðŁijĬ ðŁı» +stud ded +heart beat +de acon +firm ly +infec tious +ste f +out lines +le asing +cla ws +sen se +tab s +hoo t +mo sul +spa wn +co a +hog warts +ve in +alban ia +manu el +b ino +vaux hall +scot land +go bucks +mat ty +phy sio +tor ino +const able +investig ated +s lower +mistak en +bay er +wild fires +vo ic +x on +time to +chas sis +bar ric +pi on +bald head +woo k +regi str +dra fts +b hs +li gue +l ick +staf fordshire +baf ta +dar ry +je anne +ven ding +cor p +⼠³ï¸ı +kid dos +fen way +ca o +west bound +ðŁĺ Ļ +dv r +quick er +bla h +goo die +ðŁĴĭ ðŁĴĭ +vo x +esp er +fac ade +cor relation +red bull +rou p +decl ining +chi ve +mc gee +tur o +in der +f eller +fu g +il ysm +mar di +peshaw ar +ki eran +ine ma +meat balls +pe ck +depre ssing +sen sing +gi z +dd ington +spring watch +ro aming +yellow stone +horse shoe +am man +week day +ol or +ðŁ¥ ° +boo sts +spr int +scar ves +je e +bee tro +cl an +all the +ìĦ ¸ë +enlighten ment +ado be +re generation +? @ +cont ag +yach ts +to u +mor a +en voy +r ani +go li +dhanush kraja +wood working +streng ths +se di +disc s +ar ina +sc on +lit e +ano ther +ðŁ¥ Ĭ +ye men +gu ern +sav vy +lo yed +biom ed +heart break +comra des +milli e +pat ch +un f +jar vis +bl aming +commemor ation +ge y +å ¥ +cardio vascular +alig ned +docu ment +. ? +aesthe tics +em u +the irs +le h +ps ic +si f +pl ateau +ex pend +domin ating +rob es +mauriti us +excep tionally +hom er +discover ies +bra un +ten nant +insul in +ðŁİ ® +car bs +te as +? !" +zi e +franco is +brow sing +th ol +cla rence +hel per +ob tained +cas sie +le es +! 
, +pome gran +hu bs +presti ge +] [ +mach er +bott led +pun ch +pi pe +o ch +gall ons +deliver ies +u ra +un day +mon de +depic ts +re gency +outra geous +khal ed +car o +he arti +za g +develop mental +over coming +stati stical +flavo red +for ds +cre atives +lau rence +di as +sun screen +in ked +pre acher +n ul +impac ting +auti stic +âļ Ķï¸ı +o ss +pel icans +cele ste +v b +ru mp +mc gra +fair fax +hu mor +bbc news +row ling +cal der +seam less +ag ne +p ti +mix ed +t shirts +mer ci +b tob +women instem +genealo gy +pre ven +l our +cra dle +gi use +Ð ¾ +chron o +fair ness +chocol ate +tor y +as da +pre scott +stret ched +al man +u il +re charge +in tre +ob st +hosp ital +hay ward +teneri fe +fried man +vap ing +confe ssions +ye ah +bal li +luck now +cor pse +sculp tor +amp ton +t pp +indic ates +sur plus +tru man +ðĿ Ļ +sin ha +in vo +sovere ign +ke v +establi shing +engra ved +assu ming +ðŁı ģ +sou za +fab i +ton ed +oun ge +del oit +dow ney +no ble +om or +car tridge +ðŁı IJ +u hur +hol loway +succe sses +r sa +âĦ ¢ +ma zz +tw d +disc ourse +. < +y at +satis fy +com pri +ठ¹ +graph ite +disser tation +ar ter +í Ķ +b ally +zom bi +ly ons +a ic +u bc +pra da +e il +da x +cla i +grand daughter +extravag anza +chall enge +ðŁ¤ ŀ +po ver +primar ily +dad dy +man a +bi kers +inqui ries +da un +fel ine +gener ative +he f +benef iting +lind sey +pol ka +demonstr ated +al le +rand y +o su +low key +weir dest +red bull +our y +n ous +wood stock +cre denti +nic er +g ado +aly ss +ap h +prepa redness +station ary +incorpor ated +dy er +sarato ga +cele sti +: " +antibio tics +or gs +inde fin +ap ron +и Ð +fif teen +no f +ðŁĶ Ŀ +ph x +te ga +m z +organiz ational +on air +band ung +pleas ures +mor i +secre tari +rac coon +ca shi +pil ates +k on +geof frey +la o +kam p +depart ments +back packing +an am +à « +crack down +aun ty +on do +li zzie +ph ers +cu n +ðŁĩ ± +k pop +pu t +inten tional +connol ly +bar clays +hs fb +swin don +u ku +s ally +a int +âľ ħ +pen ang +up lifting +epile psy +inter ro +bun gal +go ku +blue berries +ठ¦ +u ssia +sil ky +mou red +i stic +bri efs +me ats +go b +ch aser +state wide +pra sad +gl itch +ar in +ban ff +memb er +ðŁĺŃ âĿ¤ï¸ı +lo ving +hall a +ภ¡ +smo kers +yak u +scicom m +physi o +sw ol +lem ons +gel ato +ch ool +capit als +ki stan +ti ghts +spi kes +trav ellers +ik lan +commissi oning +ar ine +emabiggest fans +empha sis +front line +pad dock +destruc tive +ba ha +l inger +je wish +shet land +mc gin +mon key +ko z +s one +raj ini +te h +y en +c vs +masqu er +gir ly +we sle +was nt +bro dy +termin ator +gil le +mag gi +bir die +jeopar dy +cu bic +vm ware +intric ate +an up +to pia +east on +sab res +investig ates +bu sting +bil ingual +valent ino +in format +fer re +advent ur +hydr ate +for sy +az iz +san to +e de +whist ler +continu ously +d ham +un used +ji had +addic tive +vi dy +do b +i do +fi ed +ni versary +n one +fu er +ðŁĺį ðŁĺĺ +coven ant +prin table +immac ulate +o em +cl t +serv ants +consu med +un released +sc um +pack aged +me re +ìĦ¸ë ¸ +to by +ta f +spo ons +me al +f ball +fair field +jan et +silver stone +dart mouth +follow me +voy ager +kom bat +anni ver +ene w +mag dal +ho ve +sa th +grizz ly +car di +gart ner +sand y +kan ye +post ure +po ign +im pulse +radio logy +horiz ons +si am +aish war += => +no che +tr is +el yn +com me +du i +ce c +councill ors +cudd ling +creep ing +loc ke +manag es +trans ferred +ne cks +di er +dan o +v ick +lun ches +d he +en sures +cri ss +ul ster +bann on +cont enders +sp am +sweet ness +med al +hon duras +arc tic +ultra 
sound +in fr +disco vers +ei ffel +ca sters +ru ben +du st +awe ed +atri um +lest we +se ared +ðŁĵº : +ty ne +ex changes +little mix +l le +astron auts +hersh ey +work day +kno b +so v +re signs +today show +der man +an th +af c +ta ster +sw oo +sa eed +per ing +narrow ly +rn li +best buy +panas onic +obst acle +farmer s +ðŁİ Ļ +pa wan +ki est +ang ers +absur d +oh my +sin o +pist achi +sp ice +giu li +prime time +ko w +k ens +ex agger +! ?! +u ba +midd les +ju dd +e jec +slam med +pen sions +of a +re create +b hp +xx l +liver pool +thre sh +pur ity +ni eu +hol ics +wr ath +ra do +gli o +am ma +dile mma +cr u +lets go +.... @ +âĿ ĵ +sugge sting +tru mps +hor us +f v +ic om +refer ring +predic tive +tar ts +ge tte +so ck +glo ssy +pin ky +al ec +thy me +ou ra +thero ad +pe tr +cr am +p fi +dv n +me ier +incen tives +tun nels +mobi l +rec ap +extra s +upri ght +rev amp +per severance +, - +ot p +mir ror +ar wx +ger ry +ma her +g or +hom epage +am is +ag ra +made le +best friend +sirius xm +bun dles +admir ing +t dsb +ðŁį ģ +ch as +slow ing +ro h +wall papers +âĢ¦ / +tek ken +gang s +tal a +lind say +shou l +line backer +tool kit +ur anium +caly p +ab rams +mat thi +ðŁı ¿ +hon ourable +da yo +ver sail +tan k +st c +fr itz +spl end +pat ag +anno yed +on day +devast ated +chattanoo ga +national ism +mas sey +jen n +tail or +dev gn +org ans +zu cchini +on fox +sat ire +wex ford +dis grace +no to +vol ta +âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ıâĿ¤ï¸ı +à ¶ +home owners +poin ter +m cr +au sten +day sto +mo ons +pal ma +gra zing +e so +influen cers +shahid kapoor +compli ant +measure ments +develop s +y d +par l +p vt +rand olph +tor tured +ger ald +eli as +deepi kap +war mup +hick ory +g ap +co ffin +am our +re neg +moun ting +seven s +ig le +hi er +dec ad +tri ght +esc apes +wer ner +t fl +ful filled +ni ger +sour dough +re aper +choo ses +spin ner +week nd +fil tered +sh uk +kat i +old ham +open source +kh anna +at elier +conne c +opho bic +gla s +complic ations +ar son +counc ils +sm ol +as sy +lur king +ling ui +han ks +e in +Ù ħ +ru gs +n guyen +nou veau +men ace +le v +alad din +ru ining +round about +k m +con or +shoo ps +may day +traum atic +prab has +ka iser +k ita +rou ter +pe dro +re tar +stun ner +spani sh +distur bed +acade my +e learning +wit ty +sen g +fer al +av y +sta b +ke aton +ur du +ko to +hu i +coo ke +ari an +the personal +u ma +se ap +a sting +rhetor ic +hand writing +munici pality +consor tium +ðŁIJ Ł +glasgo w +ra ya +eli za +polym er +bro th +prac ti +correspon dent +addic ts +gay le +ail ing +o fe +p li +hear tw +st itch +sight ings +prie sts +sam o +slo th +good wood +roc co +sab c +summ it +l ace +pres ley +itt en +cin cy +thepersonal network +s week +pe gas +af con +regi stry +ci m +le th +dic ap +cand ice +flu ent +sm ack +pede stri +al oud +car ac +priyan kach +p gh +ir ons +dol ce +lat via +dece ased +thero ck +cla p +cen e +fo am +morris sey +gre t +essenti ally +com cast +be agle +argu es +ing ed +- âĢ¦ +sa g +ha san +ðŁĻ Ĩ +ðŁį ° +nh ra +kann ada +indic ators +on er +bri xton +at as +screen play +sor ority +sha heed +he em +class mates +tain ment +es i +breast cancer +zucker berg +aur or +en cia +ref ers +kae per +vor tex +com part +lym ph +photograph ing +ste ff +rest ling +par sley +mom ento +th man +lac king +du tt +ocu lus +fin o +fren zy +ra sc +der n +dis missed +noo k +met gala +sh ill +rapha el +maver icks +exhib its +eag erly +c pa +amen ities +. 
âłĢ +exo dus +ern st +lit a +deal t +womens march +i ain +score board +campe ones +c en +ti ki +garri son +fidel ity +bra g +road map +psy chop +lo e +ble u +ðŁijĬ ðŁı¼ +sau vi +spr inger +temp tation +ru dolph +ac ura +wic z +parach ute +stro l +len ny +zi k +dom s +nb af +al pac +vivi an +ro ve +pre et +perpe tu +sna ke +air soft +infl atable +prin ces +ati e +ffe y +pati ent +m ire +chel le +sl ack +groo vy +# : +up loading +!!!!!!!! !!!!!!!! +siem ens +provi sion +v fx +need y +f ats +to poli +bhu tto +sa thletics +alu ms +t winning +south western +adop ting +last night +man ne +la ga +tw ell +ac ia +-- -- +eye wear +hur ley +fle e +sa ch +pe cker +cost ly +is k +cr ates +polic y +ero sion +in go +wer k +ðŁIJ į +torto ise +therap ies +inter net +chihuahu a +ri ps +fre i +ed or +tai ji +t fc +do d +demp sey +christ in +chen g +hi ps +gra eme +com passionate +cavali ers +histor ic +soul ful +crimin al +ja c +vin ci +expi red +sur at +turi smo +k ona +se aweed +ber ts +le ica +expre ssing +a al +wor t +break fast +her ring +am used +rhu barb +mar tian +cospla yer +y ash +stri al +ra ul +refer ral +dw ts +j w +ad ler +cur tains +gu r +val ence +tyr one +sw fc +coach ed +re born +diabe tic +cho ke +nor folk +investig ative +ðŁĴ¯ ðŁĴ¯ +z id +v mas +phi e +objec tives +âľ ĭ +over due +di vers +mat su +ðŁİŁ ï¸ı +casu alties +ภ§ +al k +stand ardi +re alist +arti facts +pand or +ke x +in vin +( !) +ine y +par aly +mr t +fay e +the voice +on ga +de ed +skin ner +az wx +speci men +priyankach opra +nu evo +bar kley +toulou se +resu mes +football ers +cit i +fe tch +è re +lestwe forget +ðŁĻ ĭ +ch unk +dri fting +manipul ation +equ als +pu tt +ky ungsoo +âĿ¤ï¸ı # +ela stic +par ano +fo y +do ping +cin cy +ss ler +interrup ted +al ay +ado res +ame thy +con voy +ãĢ ı +Ĭ ãģ +black list +gener als +sa chin +bru shed +oun ces +non stop +illi ams +bt sarmy +u av +ru ff +bur ma +bi k +defen ce +schul tz +bo asts +lonel iness +go re +trans forms +alum na +@ @ +ra ppers +ne hru +car o +himalay an +wearab les +ge h +pepper mint +re development +flam ingo +cos by +big baldhead +ag ri +bare foot +sco pes +re gram +gh ana +ðŁİ « +i heart +sa die +carri e +microbi al +ku ala +sk ater +quer que +âĻ © +gen res +reas oning +ch ased +as o +sli pped +en can +vam os +ker s +ad verse +mo il +commod ities +with you +sil ent +hy pe +an de +am ination +whi spe +lit z +âļ½ï¸ı âļ½ï¸ı +ri ff +pp y +lam bs +gan esh +ab sent +regu lator +marse ille +en roll +par cel +wa p +by rd +ðŁĩ Ń +tu ber +country music +par l +contro llers +responsi bilities +we y +ch ate +montene gro +chic o +mil an +l ms +tra inees +appropri ately +un certain +popp ies +ed sheeran +nutr itious +gar o +deut sch +awe some +ãĥ ¼ +comfor tably +land marks +et i +re usable +daniel le +ro sal +co les +just ic +c cs +f anny +ni m +mc u +clin ch +at ene +mer ge +im db +ang lo +uc cino +pan ini +an not +bur berry +feat ure +predic ting +fashioni sta +s ask +imag inary +mm o +south sudan +spe ar +hu bble +jo inthe +coyo tes +sli go +ko dak +sit com +polaro id +roo ted +corru p +ðŁĻĮ ðŁĻĮ +bris ban +at z +ah l +re my +tal ent +aval on +ra da +pau line +locom otive +go ons +ne mo +maser ati +ic u +stu tt +histor ically +sm b +pres by +avo id +so oners +rhine stone +w ad +ri sing +tro t +mo des +reg ent +optimi ze +re ece +sm u +ver ti +newyork city +cor tez +ra c +in case +sin c +fiel ding +e tta +tiff any +al monds +sad dle +k rat +mat ter +g low +star ving +gl o +cra ppy +sl ur +st d +monit ors +recei pt +maymay entrata +mc il +un is +rain bows +cal dwell +pacqui 
ao +j op +a fe +hoo k +es sen +wiz ard +medi an +fla ws +com s +âĿ Ħ +ing h +ha ynes +anton io +tem plates +ou ter +na w +cardi gan +bel grade +ðŁĴ ī +hom o +a ise +ro pes +no ve +what you +tri gge +concep tion +ad ukone +na di +fri ars +sw er +adju sted +hot line +san ity +kau r +down loading +c gi +ten or +eth nic +app alach +ภ¸ +pa g +gol ds +on set +investig ator +car tel +peace fully +jarre tt +cat alan +poli o +n um +fru stration +dhar ma +my life +âľĮ ðŁı» +aber deen +mu sa +bin der +spark ly +fle eing +instin ct +co ping +domin ance +ill ers +er a +u conn +lo oms +living ston +gal i +he s +c ma +bel a +se ley +mon k +la ch +mar x + ´ +m erica +woman in +es sex +ra ina +jim i +nep tune +z ack +chine se +mart ins +chand elier +her n +with us +ear l +asph alt +modu les +st p +ul la +psychi atric +mile age +captiv ating +si der +men to +mor t +tran ce +tal bot +ab by +ì ĥ +âľĮ ðŁı¼ +j ak +daw n +turn up +scre wed +fe ds +blue print +ðŁĴĸ ðŁĴĸ +har sh +er os +insom nia +ban kers +ta emin +mis conduct +hu mber +gi di +edu ardo +con a +musc ular +consu ming +ra sh +don nie +di pped +col lie +samu el +melt down +ðŁĺįðŁĺį ðŁĺį +me z +exam ining +schwar tz +pri stine +ðŁIJ Ŀ +ve it +ful filling +an esthe +gue sses +dra ft +som me +soli d +pati onal +ho ped +evolu tionary +all er +enter tained +sli ps +lud wig +conclu des +sen sible +bon net +cra ze +tra s +haz ards +const antine +ed ics +star trek +to c +occu pational +in cheon +deepikap adukone +pizz as +new comer +de part +oppre ssion +ebon y +foss ils +tro jan +el en +ste aks +k hou +positi oning +ug by +red cross +ak h +dol ce +us mnt +pp en +dil ig +ma vs +call er +cost ello +⼠Ħ +dy n +thing s +rhin os +a xi +sar kar +con vocation +att ers +ss ss +fun gus +eu gen +russ o +squ at +w sb +eli on +william sburg +s off +defici ency +be arer +o kin +key stone +t wain +cal ming +break able +wa res +horser acing +com bs +bun ting +u it +t land +ðŁĴĻðŁĴĻ ðŁĴĻ +ga stron +sab ot +ick ers +commissi oners +sen ate +ii ot +ath ena +nit rogen +an tony +ero tic +di alo +mis sou +hypo cr +âľ Ī +kaeper nick +can v +d roo +clevel and +o sh +mon sta +stefan o +^ ) +sh ul +po ison +ha e +commerci als +ma ul +nit ro +co worker +alo e +vap or +t ents +russi an +qu id +question able +mid get +po ker +girl friends +sin the +erit rea +ten ure +depos its +buc keyes +spot ter +theod ore +trin ity +joaqu in +u cci +follow the +caf c +mp a +ðŁIJ » +plo tting +dom ino +ta ek +sion ally +dicap rio +pa p +car mel +ig er +bt cc +beth le +www bigbaldhead +foo die +bagh dad +mason ry +off ended +à · +ภģ +sc ro +vers es +ori ent +ar ches +pi yu +know your +gre e +ta kers +gu ard +dish on +bucket list +bha fc +war dly +ðŁİīðŁİ Ĭ +leigh ton +pe w +stra y +assaul ted +in hal +ly fe +amar keting +l x +kat z +ubun tu +me o +carto onist +turno ver +mi z +dis like +mul len +mo f +bl and +hi des +emer ges +chori zo +truste e +ma hog +lan sing +paralym pic +fa int +fa una +ch al +sn ar +cat h +bent on +cast illo +sli ppery +apric ot +oec d +bar o +l z +he ming +clow ns +co workers +peru vian +commu ters +y ell +ðŁļ ´ +under ing +v j +tt p +fli pk +w ana +soc ent +Ĥâĸ Ĥâĸ +ठĤ +oo sa +jag ger +di sm +e less +d ham +cali f +a official +ec lip +harro gate +gra pp +com rade +n tr +concentr ate +thi ghs +bit coin +bel arus +ë ĵ +end uring +now watching +industri al +pi p +ar on +ar at + ® +whit by +oooo ooo +sa ree +tic als +mis leading +yo on +year s +sle igh +roman ian +sciss ors +vam pires +ac up +ab ba +th weeksary +cent ri +fl ye +u o +c bi +bu ena +sin d +mar ino +bur r +re 
building +ठ² +anniver saire +ac ca +ðŁĴĢ ðŁĴĢ +gett ing +tu lips +wolf pack +âľį ï¸ı +more than +ta kin +ðŁ¤ĺ ðŁı» +u be +mon ic +dou bts +mo wer +co balt +don ne +specul ation +argu ably +kak u +htt ps +prosecu tion +din ah +stam atic +disclo sed +bever ly +fl wx +cra bs +extraordin aire +war mest +imper i +o logists +trac es +par c +lake side +am r +ter i +hour ly +domin ation +ar row +shrews bury +ance stry +wr angler +trigge red +pen sac +roo ster +survi ves +a on +bo ko +val or +love is +la g +pe y +fo cal +out laws +bl anc +artic ho +wit s +marsh all +die go +support small +u ca +sa h +je et +syn ago +gover ning +ðŁĴ ¬ +sal ads +cre ate +miri am +cen sored +ami de +no u +z eta +allegi ance +* ) +bl m +ric an +pa stors +oly mpus +blo c +whir l +star ry +pr one +y k +p ne +congratul ating +be v +so ber +love island +sa ir +an ing +tutor ials +q e +lun d +in ist +cle ver +taxpay er +ali z +wren ch +dd ling +cap ri +h pa +ðŁı» âĢįâĻĤï¸ı +na j +o j +futuri stic +jelly fish +ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥ +cel ery +plan k +fil a +ne me +un healthy +lec tions +ðŁ§ ¡ +rit chie +n ws +mi kha +wonder woman +âĢ İ +hip stamatic +ka g +ðŁĴľðŁĴľ ðŁĴľ +poul try +mo w +wor ds +lo ff +ðŁ¤£ ðŁ¤£ +relat able +re mixes +keny atta +ke m +re signed +fo d +stra igh +j lo +hu tch +box ers +colle en +mag s +instruc tional +ko l +attrac ts +pra g +account ant +go ggles +br u +th ole +mar row +leu ke +oc to +pon ds +bubb ly +he ist +ìĹ ij +im p +a har +ha unt +hall mark +psy ch +kkkk kkkk +col umb +jump suit +cost co +si delines +ag gies +over turned +ni b +key chain +fu k +f af +mi am +assist ants +cy cled +ri der +dam mit +red wings +mag es +kin s +ì Ĥ +ho d +son t +carol ine +" ' +cu le +bra id +fel ony +ar ities +ruther ford +depic tion +isab elle +ro ach +k day +fifth harmony +em y +li gam +bari sta +albu querque +gro ss +ðŁį º +oo ks +ðŁij ¼ +dun can +try in +jag s +g ould +li tho +âģ £ +а Ð +sam my +tun g +cas ser +apo lo +aaaa a +man g +as ics +sh en +p ye +tur bul +ss p +saint sfc +on lin +n anny +he ster +do z +ภĶ +th read +ren ts +kh and +ðŁĴª ðŁı½ +un conditional +rob son +car re +ph on +sacrific ed + £ +auto s +par ker +oc a +log in +kee gan +hard cover +dough nuts +ðŁĮ İ +spit fire +refresh ments +saskat oon +commod ore +j f +rub ber +halam adrid +child care +stra da +io m +ri k +dak ar +ther mom +cro pped +gar u +ali k +ven i +i ft +si ka +ritu als +z ul +e ch + © +su dan +l land +i me +do cker +ì ¤ +fe ared +fa o +wal ter +no g +mutu als +l h +ali gn +mon ia +concep tart +ðŁĻı ðŁı¼ +sco e +compet ence +sw ine +ly me +laun ch +green er +abstract art +inqu is +gran ada +ga elic +flu ff +d backs +grave yard +ba be +acade mic +adventur ous +joh ann +~ ! 
+bi bi +| # +pl ings +gett y +as b +âĿ¤ï¸ı @ +staf f +religi ons +bang or +world bookday +me gh +de vin +ash ore +meri dian +gi thub +qui z +all stars +be stest +ir resi +ack er +do te +war rington +pol ly +newor leans +cr ou +wi gs +che y +smithson ian +la sag +de tour +bor is +stra ps +mari ah +inten tionally +ko h +ðŁį ¸ +ssi an +mar issa +cor al +episcop al +casu alty +tom o +supply chain +sam p +on go +ro o +cavi ar +p fw +clau dio +buff alo +s ations +mat ty +snap back +l ds +al arms +mat te +âĺ Ķï¸ı +conditi oner +d ors +he x +fi zz +a stri +sus sex +secur ity +qa eda +all star +cocac ola +as one +cl icks +sc ans +mu te +he avier +ðŁİ § +âĺ ŀ +lv l +book boost +youtu be +fla shes +f jor +c su +explo de +do dge +cair n +gonz ales +th ill +pel le +hart ley +renew able +re tin +e stre +costar ica +shipy ard +nc fc +pri ya +a ghan +an ath +plu gin +co rey +re bound +or u +kat rin +hor mone +gi m +mahin dra +s sus +park land +har per +fanta stic +infer no +ep ilo +wrest ling +fe ct +c it +ac oun +to ssed +monu mental +char tered +bu st +pe tra +âĮ ļ +wildflower hour +sweat ers +* . +bl er +ate ch +go wan +demo graphic +bra l +suici de +renov ations +vu el +sin ister +ar mani +miso gy +ph arrell +nap s +un iting +crusad ers +cor gi +insu red +than i +no or +g q +d ada +bicy cles +snu ggle +sch an +ten berg +ss al +fe mme +bo il +½ ï¸ı +re ap +occur ring +hus sein +divi d +sto ke +sh alom +na ia +o lic +frustr ating +Ù ĩ +ig s +gro ver +scen arios +n ds +bru tality +med alli +bu on +sas s +skate boarding +ony x +lor ry +ny u +gau tam +mm ings +gu g +end i +lo thian +comm ando +chal k +ph ora +asse ssing +ti gh +crun chy +ad ay +is l +ci ara +pilgri ms +kam al +p to +brit anni +t ani +sm c +l ure +app store +ab y +golf ing +cl c +fa u +an as +shu tting +regul ated +carn age +scow boys +all enge +c ma +humbold t +rel le +ku mb +her i +refin ery +sound check +d wayne +bos nia +i sp +the alth +anni v +relev ance +my a +bag gage +dre ad +s bc +th ed +bu h +hi jab +lo id +ke w +c te +respec t +lovel ies +cu bes +celebr ate +dir t +sav ers +_ , +gar ment +pulit zer +mas jid +beat port +al arts +encry ption +s ner +ple ads +found ry +sym metry +ru mi +birth place +scallo ps +supp le +pivo tal +t ati +no de +so d +pro xim +tr ics +col dest +bren t +mand u +cla ir +e ach +and alu +hi ddleston +ðŁIJ º +mel ts +v ance +pin n +se ments +scre ened +sa chs +o bl +ic ha +âĺĺ ï¸ı +school ers +heal ed +lo gged +ðŁ¤ĺ ðŁı¼ +ic us +bore dom +b ish +b ffs +tal king +sure sh +hoo kem +de on +de fl +ei leen +ðŁį ķ +women intech +ri sotto +rang er +adverti se +ภģภ+tel ly +la go +dart moor +d ong +sk ates +lo go +un ner +mail box +ma sala +lo oooo +amethy st +che wing +c bb +australi ans +rc mp +game art +# ... +kor n +extre mism +fruit ful +anci ent +pu bg +pol ite +wh it +mur als +m gr +line man +dav ao +ste ms +ten nis +av age +tu pac +gigan tic +hs bc +auto biography +up the +ี à¹Ī +re gal +fig uring +ku l +mis sy +hoo p +gra s +for ums +back lash +abduc ted +p nw +min ic +bu tt +bott oms +at on +ven g +ðŁĮ ı +del aney +prab hu +fan club +over haul +health ye +sy no +aa f +ren amed +kim i +un cle +man city +se u +qu anti +este em +um in +en zo +mel vin +under go +j har +far ah +coast ers +humph rey +mh z +children s +^ . 
+d hi +disrup tive +integr ating +r nb +over sized +a ide +ne au +docu mentation +ðŁijĢ ðŁijĢ +pal o +hear th +ri yad +pun ctu +abc news +secu res +boy band +bir ch +ju co +tra ff +legislat ors +bay a +ãĤ ¯ +no ises +collec ts +s warm +k ner +bi shops +stur geon +snapp ing +mo l +fre aky +chair person +tro p +lyn ch +car cin +art sy +e sto +cha i +fl ur +inv ali +sau sages +im el +j or +fun fact +wit ter +puni shed +ac ons +h ya +re versi +em c +dif fu +z x +sp aw +cla d +d mit +hol land +fre sco +pay roll +ab undant +stu ffing +mor o +c ny +boy cott +wend y +ele ven +pro voc +pil ot +tr x +be ad +climate action +ri on +assi e +ì ĸ +o sm +islam ic +ho ar +good reads +al ici +afterno ons +spoke sman +jo lie +it as +masc ara +âĻ© âĻ« +pre vail +beetro ot +lu jah +k li +dod ger + » +ru le +l n +scre am +ho bart +col bert +r tc +er m +pat ro +quo ting +s live +que st +non fiction +semin ary +prosecu tors +ve st +express way +g ge +nau tical +et f +ðŁİīðŁİ Ĭ +dur ation +cha ired +the film +fab io +she h +can o +ðŁĴª ðŁı» +with draw +! :) +cor pus +phen om +yel p +la wn +ent om +snapp er +but te +pin ball +pro xy +libr e +alle vi +n ada +gabri el +fo wl +eure ka +daph ne +tu nes +pun ched +wh ore +jo g +ren tial +man ners +o pe +wh ufc +gu th +revol t +sne aker +philharmon ic +ho ste +sovereign ty +ðŁĻıðŁĻı ðŁĻı +fish ing +sci art +fe ta +i pp +dump ing +kel own +gir i +dig its +sal u +san jay +twee ters +sp as +col chester +sc ab +ma dd +๠Ħภ+Ä ĩ +ged don +march for +do p +maure en +un plugged +di do +fashion blogger +up a +mex ic +tar y +pol ye +jame son +v t +grin der +mad dy +consult ancy +¬ ë +leagueof legends +ac cents +um ni +jane iro +tu ss +h ens +ampli fier +to shi +pret tier +pre vents +new town +red wood +vant age +ball ard +ar tof +a she +a sion +lac ey +ap at +gro ve +ภĦ +rw and +real tors +tra itor +bed ding +ö r +zi on +fla shing +cam pan +boom er +secretari at +ab ol +liti gation +cont amination +se dly +shred ded +in for +do herty +bench mark +ro che +skate board +sho vel +i zz +to pper +o ster +laby rin +autu m +k ong +hum mus +vi z +tech news +kla us +am using +socialmedi amarketing +i des +cast ell +ste e +underestim ate +cal ab +pa ign +b illing +unanim ously +g mb +fly fishing +hath away +commerci al +colour ing +skul ls +pivo t +te p +tb c +motor way +x press +construc tive +pu k +under lying +kir sten +mani ac +cha o +se ma +chiff on +ðŁijĮ ðŁı» +ver ona +kom o +stan doff +wi ped +c ated +bla ir +wor kin +m sc +bethle hem +swi pe +unexpe c +pe es +pe tri +orig ami +ðŁij ħ +mex ico +flav or +ru dd +cannab is +mar u +ri ddle +wor shi +sil on +sch at +ap se +tang er +bi ous +e er +questi oned +o zar +dan k +angle sey +char an +bak u +compe ten +re pri +bat ter +sa xon +cal ves +leng ths +$ $$ +âŀ ¡ï¸ı +immer sion +ga unt +car ry +cy to +b anda +shu tt +experi ence +el gin +mous se +ta z +ê µ +in correct +en z +b ham +mor on +so ver +ar un +ti pped +la ble +de arly +bau tista +í Ļ +mor tal +woo p +dt la +sho cks +dav os +ðŁĵ Ŀ +swim wear +her man +ðŁijĩ ðŁijĩ +z ir +neglec ted +grac ed +campu ses +av s +ar ora +swach hb +live pd +ac cra +enqui ries +shoo ters +kur t +vancou ver +brad ley +gar da +g ü +ol la +attrac ting +up ton +ne win +lu mia +furn ace +ev ers +e on +sw a +roo kies +a oc +v ss +bris ket +tor ch +yo da +heart land +tac o +ph ony +food bank +ab bey +bab ylon +u y +gre ate +expre sses +d andy +sc apes +survi vor +ron d +e ci +ha vin +ab el +chil dish +tor que +wav y +ur self +kanye west +year of +ale stine +o brien +al fon +sk ag +kore an +anchor age +val eri 
+de w +ðŁİ ¨ +land slide +car ole +christ en +go phers +af i +priyan ka +q q +power of +it te +pc so +tw ol +pr y +intellec tu +guer rero +pi les +wish list +w ren +time table +ë ı +prodi gy +gibb ons +. / +ne ur +anz ac +mur ray +vie st +pla ster +la ir +art gallery +inter continental +g br +bell ator +nam joon +mam mals +am el +y aw +saras ota +cam ar +bud ding +sum mari +aco sta +la sh +ey ou +post graduate +instruc tors +ti g +const ant +were wolf +ic os +cla s +glen n +bud ge +ðŁĻ Ĥ +er ta +sta ins +persecu tion +cumb ri +o ch +syner gy +hu ang +scand in +mid terms +comment ator +regar ded +perpe tual +bo iling +al p +lan ge +sch le +fac eli +twee ta +ri dden +ok toberfest +charlotte sville +ik lan +jo u +ch atham +b sc +ðŁį ¦ +stra uss +mel low +xx xx +happy hour +re actor +ww er +distr action +at orial +ðŁĴª ðŁı¼ +twin peaks +fay ette +a or +ko k +bro om +sy fy +ou se +am ag +Ø · +ubis oft +lu lu +hall mark +stu art +it ya +si deline +venge ance +re lu +sex ism +boun cing +un ites +gu stav +te ssa +stu mp +pro clamation +ima x +divid end +col by +ðŁį İ +play wright +un safe +co smo +ðŁĩ²ðŁĩ ½ +cup board +constitu ents +ang lia +ram page +ðŁĺįðŁĺį ðŁĺįðŁĺįðŁĺį +than ked +take aways +shro ff +de bat +kh ur +conduc ts +format s +à © +port age +graph ers +u ten +pre m +mo ines +condem ns +s ous +l ps +f cs +deal ership +leuke mia +bure au +ski d +guardi ola +ca ster +thir d +avoi ded +en cyclo +c sr +vi xx +analy zing +she ar +dulu th +shap iro +chan ting +stre sses +as be +mil itia +ãĥ ª +col lin +arsen e +sure sh +teach ings +yi xing +sh ill +nu des +sv u +clear water +war ped +pro life +artist son +it u +versail les +galax y +ax el +spring st +cal a +hu hu +sc u +commit ments +exe ter +poign ant +mo tion +conserv atory +row dy +rec alled +mu sk +emb elli +so the +âĺ Ģ +sto pper +sch ild +to pe +el mo +zi el +j om +barn sley +snow den +on tour +jour ney +hills borough +par ole +w ts +mo ving +ag ility +tiv o +ff ers +kindle unlimited +g wen +ann an +ah mad +tex tured +hepat itis +dra m +insi ders +tis sues +ãĥ Ħ +fc barcelona +cr atic +na acp +pe can +f gm +custom ize +concer t +g sm +pe g +p one +justin trudeau +super cars +happy holidays +bu lar +ado x +lap tops +digital health +destin ation +gradu ally +áĥ ¦ +popp y +ss l +inhi bit +star light +of fro +glo omy +x per +hal der +im plants +le to +hass el +a as +un told +en ci +liber ia +or an +con tests +il ah +sma g +sc out +mari anne +cr yo +schedu ling +lo s +kan e +stutt gart +ne se +law rence +da in +pho tom +car ou +ภ£ +g wy +national dogday +roa sting +band camp +kentu cky +stret ches +ke rel +ca she +ãĤ ¸ +sta x +tran si +dog gie +at ric +hal le +ci vic +brow ning +lein ster +cat day +high land +joy ous +in cumb +or lando +ro mo +col ton +del ta +car ab +ro tc +aster oid +goose bumps +mo logy +yo ko +an ds +tomor rows +red carpet +sm p +ca sio +ðŁ¤£ðŁ¤£ ðŁ¤£ +se au +rejec tion +rot ating +bi partisan +th un +mat i +bon i +ol l +ener gye +do it +l j +mother hood +lou ise +neck laces +el ite +ni x +l cs +en v +gl u +le sh +cran k +su sie +m clau +so tu +crow ley +rat ri +use d +bre ton +alfre do +ye o +travel pics +ti pp +elli son +sax ophone +me red +heu ghan +ta ine +f es +vi ro +suppo sedly +i as +dige stive +y le +li zzy +wildlife photography +bri anna +west field +ra ined +am her +ðŁĺĦ ðŁĺĦ +distribu te +bott om +pre serving +oil and +craf ty +de scen +col ling +shakespeare sunday +r wc +ang led +ci an +t ations +mon tage +me yers +france sca +ðŁĮ · +wi ggins +san ford +volunte er +car ra +bar k +vari ed +pl in +am u +kap 
il +rock ers +qu ind +br ane +in mate +ent al +impro vis +michi gan +re tweeting +progre ssing +mercedes benz +smo ker +physi ology +dor ado +watt pad +h wa +sr bachchan +w ga +vol atility +hi re +ac ap +wn ba +hein z +stit ches +kidnapp ing +bur ys +lim b +f itters +thumb nail +ton e +mir and +desi rable +ad dison +tar an +tamil nadu +spec tator +soci ology +amit shah +remo tely +âĻ ¦ +ham id +r ds +g lee +smooth ly +sch ro +er c +lali ga +he als +us f +ni shi +d hu +un il +h le +tro mb +bhu tan +pilip inas +se ung +whit man +te y +min ce +snow boarding +re au +k ker +av o +zach ary +ran veer +ti k +gover n +qu al +beck y +anthropo logy +att en +grocer ies +de bit +war p +sil icon +hawa ii +ðŁĴ ħ +pomegran ate +pe er +orang es +people schoice +end ure +ðŁĴĽ ðŁĴĽ +ãĤ¹ ãĥ +ac ial +a haha +stu k +imper ial +bl ond +pow der +kno ts +vin ce +wood lands +den a +watch in +mat cha +ma hat +galax ies +middles brough +k ö +stre e +resc ues +wal do +lero y +desp ic +real ities +tm nt +ha q +un o +pe c +bolly wood +blin ds +design thinking +he ms +and hra +ab sen +fan s +ste ch +shire hour +bla ine +shak ti +pu rely +ðŁı ı +tra fal +ke ynes +gr ate +to bias +spon taneous +satur ated +caval ry +pri sc +ðŁĺ ij +wh t +pas si +~~ ~ +vir at +patt inson +la o +weir do +sym pathy +ju da +occa sionally +cred ited +stat u +es co +hil ly +esc ape +dischar ge +se er +may nard +sud bury +z lat +or al +we er +encoun tered +sm elling +over sight +ê ¸ +that cher +mack ay +you can +fre ep +freed oms +prophe cy +ho e +ishq ba +dra ke +qu its +pel led +tur k +o vi +wesle yan +new music +leg g +ch eng +h illi +ay y +pan ties +ad versity +ad jac +vaccin ation +ju ke +ga c +exce ed +time sof +sta ining +ep cot +v ital +up ward +bethe sda +apar k +ma hi +camp fire +enchan ting +rha pso +h z +na ver +fa x +vali dation +ac ad +ny r +as ym +coordin ated +depar ted +all ery +var ies +spr ite +chap lin +ss occer +s wat +bre t +relu ct +tunes app +super star +reminis cing +o co +home grown +dough nut +un canny +la pd +thyro id +! 
âĿ¤ï¸ı +botan ic +bre s +sp ade +i ste +echo es +du lil +bur sting +qui ero +ðŁij İ +loy ola +amuse ment +ha ils +sleep y +burgl ary +âľ ı +ro gue +cot land +mo ors +low er +wic ked +ðŁĶ Ĭ +compet iti +argent ine +yvon ne +karti keyan +ili ary +gat sby +precin ct +six ty +na ji +cam s +practiti oner +ðŁĺ³ ðŁĺ³ +pu ne +neg li +juli en +inv aded +cali br +cla m +duba i +mu k +lan tic +produc t +fe dex +ï¸ı : +eu ra +dari us +s ling +virtual reality +home stead +ðŁı³ï¸ıâĢį ðŁĮĪ +pac ed +in ha +pul mon +la zy +premi ering +ma stered +in he +con gregation +ba jo +sport ing +new jersey +hor ny +lma oo +leng thy +du t +yo gh +swe aring +philosoph ical +pap ua +in ski +know les +dy ke +âĢ ² +to ken +mc guire +ri ot +probab ility +mc con +gro s +su mat +c ite +da a +on da +mad dow +che w +board games +spar ked +re claimed +ad hd +ny se +imwith her +equ inox +boo ths +balsam ic +ha zy +dor chester +ag os +se aw +moder ator +seri ea +ander sen +pilgri m +âŃIJ âŃIJ +itch en +hal li +x ton +nathan iel +mun ition +celesti al +ga f +zo om +mark le +pen thouse +cal e +s fa +bar king +tu cket +em ery +cal orie +li que +ad ar +mc nam +tor tilla +wood pecker +mo town +bad ger +ayr shire +scram ble +dd ay +cra ziest +per rie +cho co +cast e +i ot +wre cked +selec ting +uss r +gra ft +pun t +lab ou +ir st +ba ek +Û Į +su ki +que u +ach at +te ster +aug mented +wc vb +sin ks +ðŁĵ » +ra ke +inter ne +be cause +belle vue +une arth +light en +ðŁĺ £ +turn around +labe led +unemp loyed +twitter kurds +le ia +h ye +great er +ðŁIJ İ +tim ed +i red +e tt +limit ations +cab e +s out +bee ch +anni hil +re trac +yo ona +ang er +den nis +supp lying +di z +" ( +sc ur +gun man +su ho +sauvi gnon +ภ¥ +wi ley +land on +choreo graphy +pre historic +ðŁı ĥ +var gas +assess ments +pinn acle +di i +chamber lain +ì Ī +v p +present ers +deut sche +sun shine +sal utes +r one +bu siest +- .- +motor ists +hemi sphere +al wx +ps p +ow a +den ying +cho c +gu tier +han uk +mus kete +jait ley +se wage +t ame +thin kers +shi m +se quo +pap ar +middle east +k wa +ke g +patag onia +no y +bar ça +take off +he a +à ¬ +n sc +g dc +ðŁij Ī +mou stache +mel ania +thr a +â¬Ĩ ï¸ı +pier ced +ze us +fon ts +ber a +it iner +q atar +contr ary +ire land +i fy +ou los +commun al +fin s +un paid +pa a +ðŁijĩ ðŁı» +ri os +ou p +f iller +cafe teria +à¸ Ń +kas i +cali ber +z ulu +v sco +ts ford +dragon fly +smo kin +pi st +psycho logist +diplom at +we bs +buc cane +à® ¾ +motiv ational +du ne +ba e +c fs +with out +er on +i ac +ate e +pen sion +fra zier +en sis +sk is +par ting +ger y +territ ories +nach os +eni ght +ever lasting +msd honi +tel e +sp un +po di +sab ah +environ mentally +ce ase +beau mont +mar ta +kel vin +ho ff +sun il +n da +co b +sh ale +ree dus +un boxing +u bio +re opened +n all +capsu les +mar r +himalay as +swee ter +ja z +f mr +twee ter +dha ka +na u +de mi +d fs +ta urus +fad ing +it utes +ci p +over flow +jef frey +don ny +car tunesapp +ðŁį ij +prefe cture +danc ed +c pt +ple asing +ital k +earth quakes +ul ation +hi o +ãĢ ĭ +ant an +nutri ent +de ere +selec ts +enrich ment +r iti +tram pol +bl amed +j ia +contribu tors +chesa peake +pi geons +tribun al +mad uro +w su +ilo ve +effici ently +dar cy +war ms +ar ra +ec u +ho wer +strugg led +rajini kanth +ðŁĺ¢ ðŁĺ¢ +hou sing +str at +eli x +disp ro +raf fic +thi erry +na sty +c fb +staf fing +al ma +back ers +hen son +sky walker +reale state +roo s +ness y +chan ce +cair ns +c ci +pe dal +ly ft +cross word +wait er +only in +kru ger +k ir +alej andro +car tier +car rera +re paired +ou at 
+un clear +un breakable +today in +qu eries +jo dy +gen ital +win ner +to l +kelown a +fascin ated +ãĥ ¬ +sris ri +squ ared +spr ung +negoti ate +priv ately +av en +>> >>> +g ical +gav in +chester field +zu mba +or r +nat alia +impeach ment +mn l +car at +criti que +credi ble +trac y +tan i +musi k +jig saw +gam bia +tol kien +fe u +as per +sav ory +fo xx +f itt +mar lon +l rt +v ell +p br +imprison ed +i om +chu l +wind shield +kay e +ba a +chor d +s art +al gon +minister ial +nat geo +la zio +nor ms +ðŁijį ðŁijį +lic king +fut bol +un sung +dalla scowboys +sh red +distur b +dev ine +be ards +ch f +b day +ro sso +ig or +ay i +si ren +k air +sti les +ro f +mag nets +un cover +mou se +bang ing +si ghted +spe ople +impac t +row land +kir a +environ ment +love the +p sis +mish ra +gl endale +ca jun +o che +de ception +sex ist +stra ws +s ga +buff er +apost le +sp l +pop up +ðŁļ Ĺ +r g +up er +ball in +i dy +occa sional +national park +ðŁı Ĭ +u an +innov ation +ภ« +te aparty +re tte +counter fe +b ha +rec s +ig en +ðŁĮ IJ +humming bird +cu r +ha ven +la zar +pue blo +: : +zi onist +op ath +inver ness +promo ter +carto on +cabine ts +mahog any +surve ying +r ational +feel ing +testi fy +so w +oc on +ภ¢ +ne el +mar is +sol itary +che mo +rad cliffe +sim ons +ros ary +new er +jo die +re tali +pra wn +pad dy +hen ge +k ala +im plant +at y +bren twood +par adox +ene z +re designed +p our +wy d +al de +௠ģ +sol d +biomed ical +๠Ĥ +tt tt +mat teo +ys er +new ton +de bun +ner dy +loo l +wo on +elisa beth +ec c +wh i +ach o +salv age +sal aries +qu ity +navig ating +oph thal +con soles +re built +o pec +ast ers +sho red +set list +kathr yn +rhy mes +re visiting +ash ish +li ft +re post +sole il +âı ± +weal th +sa at +we c +king james +flipk art +field work +se gu +mo dal +bu b +are rs +ðŁį Ĵ +clo oney +pad dington +necess ity +guth rie +pen te +li mo +jo sie +ar tin +en c +l hs +betra yal +info graphics +i er +mo a +hear ings +bon jour +sym bolic +ag ro +wed ges +krist ina +wild flower +athle tic +photograph y +pe sh +ca hill +chi lean +gou l +fi oren +ðŁij ¶ +z il +sk im +bad oo +deli a +tre ble +n cc +ðŁĩ¦ ðŁĩ +a house +bul lock +sol itude +ا٠Ĩ +can cers +futureof work +hu tch +water shed +war mongers +sp illed +colom bo +mo th +associ ations +weigh ed +global goals +not just +christ i +tor g +swe ating +man eu +clu sters +âĢ¼ï¸ı âĢ¼ï¸ı +ta ped +ul y +tru sting +yu suf +te in +ra b +, ,,, +sin ai +audi ble +explic it +cro wns +sch iz +at least +ðŁĹ £ +de bra +je suit +ene gger +z hen +one sie +i it +ss f +gur gaon +chak ra +bear cats +k ran +k awa +reque sting +han over +g end +sor os +mer cy +lovel y +do omed +tim my +ku z +ul l +ab ram +sa ison +ãĥ « +clean ers +re mo +circu its +bar red +o th +mo ist +madele ine +gall o +u j +per mits +hea viest +car ols +az te +gior gio +flo ats +decl aring +us rc +min at +craf ts +pri ma +conven i +nickelo deon +danc ing +ceremon ial +blo gg +tw p +anglic an +she k +k nick +( (( +hubb ard +harve y +hit man +fen g +we some +for za +s word +op us +bro m +gi bility +z al +m unch +dance hall +gre edy +hd mi +re birth +ðŁĺĭ ðŁĺĭ +s world +figur ine +com post +k f +engra ving +gior no +st ana +k man +ham ster +compos ers +aj e +func tionality +pol k +is ons +air planes +te se +hor rors +musc at +gi ven +sp ence +ðŁĩ¸ ðŁĩ +eli ot +ach illes +fre ck +crypto currencies +sou ther +hal o +bor neo +polit ic +hahahaha h +up state +si ena +obsc ure +hau sen +lloy d +happy friday +motor bike +bon a +americ as +hol s +- ( +spor ty +un aware +reven ues +christop her +bank sy +av 
an +ev apor +com press +eyel iner +to dos +buff y +renewable energy +ly rical +ar chan +rapi st +fair trade +lma ooo +beat z +pro active +la pse +ir ical +revers al +po de +mcin tyre +mac au +ãĥ ķãĤ +nash grier +f sa +g all +çĶ Ł +perpe tr +il ya +configur ation +% ; +str ange +rac i +ภĩ +pic kups +kov sky +mam mal +w ps +g able +compar ative +z h +save our +da vey +on etsy +mu ssels +mis er +cri stina +electr on +cra ve +lo ren +precipit ation +m z +ðŁį « +vin cen +snow board +no ida +ah n +marin ated +g tr +town hall +min is +bethe l +adv an +su ra +shi el +fur ry +ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ +lyn d +so il +sc ence +sen eca +shar jah +dick ens +credenti als +av ar +per k +requ iring +pre fer +j ian +de ca +r ach +ing for +del e +be ep +ðŁĴ » +cis ely +hu ddle +green sboro +haw king +ho ax +hang ar +ç ľ +mis o +lo vin +gre ta +ab ad +logi e +at an +snow flake +mahe sh +fear the +al kal +bobb lehead +ba hn +ju dged +fu tu +feli x +ðŁį ĵ +pi ke +der iv +notic es +au er +dis super +or da +wi pes +am ino +stri kers +foo tb +dram as +pun ching +score less +heming way +bi h +bal lad +chat ter +am mo +kle in +fabric ation +kari m +z end +hi sto +vol ta +rock y +marke ter +xtre me +sequ encing +paradig m +cle ats +boom ing +âģł âģł +block ade +promp ts +yogh urt +pur pose +nu r +regu late +nois y +ing rid +bird watching +bar tender +Ù ĥ +wor dof +cha otic +shor ty +el dest +z app +onceupon atime +fl yo +rit os +mike quind +ðŁIJ ´ +regi stering +. ] +ad ol +gg gg +pur ge +kid lit +ar bor +val ves +synago gue +o th +unanim ous +veri fication +dar rell +ãģ Ħ +vander bilt +tape stry +pro sper +did dy +dra fting +de cep +marqu is +st int +michael jackson +pee led +men us +bb b +sc are +ema il +wri gley +it is +f ell +some thin +bar ra +ed gar +di pping +pu ddle +sla de +lear ner +jal en +ðŁ§ IJ +the daily +mikequind azzi +ju x +iq bal +mckin ney +ra iser +ef an +dr one +cat o +pic ket +cro we +l att +uk o +giuse ppe +hin i +synthe si +ponti fex +song writing +to d +swit ches +din ners +h q +gabri elle +pensac ola +cir cle +expo ses +ev s +riyad h +pro men +o ck +sa j +cit ation +brew co +jo si +ep aper +dri f +point less +tang led +cri pp +line ups +fairi es +daz e +mour n +bla dder +sal z +bur undi +book mark +the people +sub sequ +princi pal +sk er +court ney +a oki +rac ers +ad m +mom a +critical role +hou n +shed ding +sa ka +ace ous +mck ay +hus bands + ½ +me da +accu sations +ro sel +nc is +witne ssing +or ama +go ds +hil ton +el man +ÃŃ n +meg ap +cra ven +announ cer +crit eri +sheffiel dissuper +milit ant +consu l +hoo ded +aby ss +b x +ma dam +lo cu +mary am +manic ure +grat is +ac tresses +ros ario +this dayin +king ly +gn ome +cel ine +r ous +he el +lil ac +vish al +ab h +thor ns +s ls +ne al +construc ting +be ren +s lang +ma ins +far ra +sar ko +pai ge +gu iller +l ala +ice berg +nou n +plann ers +u mmm +ou ses +ill ary +ma an +box ing +zi pper +srin agar +migu el +o str +mp o +responsi bly +lan terns +appli ance +x b +gren ade +neglec t +dy sle +ham mock +ne ctar +wit cher +r gv +di ence +ser bian +seed ed +cru z +bi sh +sp he +e q +sky rim +alge bra +phil ately +bungal ow +ge off +y ves +demand ed +consider ations +the vamp +pawan kalyan +co ded +grit ty +erup tion +se infeld +uni denti +ëĭ Ī +wor m +ac us +se ung +dun g +ro land +su d +di visions +ab lanc +shor test +j f +p oun +plant based +be to +tough er +mc o +don et +mark us +v fl +ðŁı ł +open ing +co ward +caber net +o xi +burle sque +sand ra +su mo +consi st +tho t +cay man +motor ola +gutier rez +d slr +y w +no bel +nov ice +moms 
demand +grun ge +sp or +d cc +pre sses +sli st +allot ment +voc ational +ft c +pu ja +lo ven +utt arak +tan dem +sh ep +come dians +anat om +cant wait +healthye ating +west side +mar gins +chi ang +asbe stos +stupi dity +proble matic +fit bit +: $ +ceil ings +shu a +protec tions +bio tic +beng ali +re sts +bien nale +tim o +cul min +e minent +affe ction +unbeliev ably +individu ally +canvas sing +wh itt +nov asco +chin son +h pe +go w +gloucester shire +pa o +thresh old +chev ron +s ine +we ther +pp ie +aqu ino +antwer p +âĸ ¬ +po on +inst af +equ ine +cinemato graphy +nbaf inals +vali ant +kil kenny +te rence +syste mic +sr l +p ound +made ira +pl ough +tre cht +mat ed +mp d +ransom ware +ph in +li qui +bb ce +boom er +i standwith +con ju +r te +nar a +foo lish +da shing +vier nes +br ite +da u +juni per +ai da +you now +ra zer +de i +repe ating +comfor ting +adjac ent +e to +ca sted +chat ur +mu er +syn th +san itary +mac le +independ ent +law ful +e erie +h or +ðŁĴ Ń +am rit +vel o +station ery +mu f +may may +contempl ating +elabor ate +gre gor +dri es +ac col +ภļ +schwarz enegger +ill nesses +day break +follow back +collu sion +electr onic +jo vi +hiro shima +ta w +hom ec +mic ah +qu itting +fro sting +ben fica +hel i +s ical +pic cad +corpor ate +ment orship +you are +sing er +shi va +ru ne +ing er +ri um +play able +doo p +wil low +ter re +ni p +at d +war bler +profession ally +er ase +proce ed +pedestri ans +mis chief +ben ding +alas kan +c kett +mo p +dd les +shut ter +ge ared +atene o +ma deline +g ations +o sha +der ick +sw ild +an gry +pat ents +hun k +decre ased +fr y +ðŁĴĸðŁĴĸ ðŁĴĸ +sal on +quant ities +d ario +ni gel +ku ma +jen n +happ ye +xx x +rex perience +pro s +au sch +rele ssly +ham burger +fuku shima +er ne +stat ec +ren d +may field +j one +lef ty +bern stein +sm il +gener ates +fore station +band its +ta yo +r ca +ac ci +rodri go +kn app +elo vers +vege tation +u ral +le ft +ħ ï¸ı +worl dre +sur i +embar k +w son +ba you +mu ller +mo vers +ðŁķ º +presby ter +l f +cre e +bat b +sal am +demonstr ations +an ec +n pc +it ics +to graphy +re inst +thur st +tal e +off ences +smart city +bro tha +ofthe year +in valuable +ear n +ðŁijı ðŁı½ +kre mlin +gra dy +town fc +guern sey +ma ha +contag ious +dre x +be en +( £ +nati vity +k tm +somer halder +comp ounds +íķ ĺ +" âĢ¦ +af g +ott news +h ound +fire fly +cil an +donet sk +volunte ered +ak ira +è ª +sing ul +st h +dro wned +mand o +he ir +ðŁİīðŁİ Ī +tax is +y uki +vel d +k ans +el k +ran ts +hash tag +t eng +ro g +a at +gru b +e ber +in india +colo ssus +sig ni +so ever +mile stones +der o +differen tial +phu ket +master mind +an gh +mel ani +bro ker +actor vijay +stun ned +continu ity +af fl +vo cal +perenni al +fianc é +in complete +hun ts +re issue +domin ates +tur meric +ro am +ri on +bag ged +nas sau +fu t +x ox +national trust +jo ye +san o +hearth stone +dis respect +le es +h se +siber ian +offe e +re stock +wolf gang +re gan +plan o +un wind +re par +mil le +] , +skul l +fat ally +concep tual +ðŁĮ ² +f é +ber to +b ms +u a +mag na +notre dame +le te +la undering +heartw arming +buffe tt +go at +pe abo +wind mill +v ac +continu ally +az alea +mem brane +can cels +make yourown +athe red +p to +tor pe +ðŁĺ ł +ðŁĴ § +sc ares +le aking +z et +pix els +ac i +kh il +marath i +ðŁĻı ðŁı½ +u la +tam u +chandi garh +z agre +aa b +pronoun ced +aubre y +sand er +pun ta +har low +ic elan +celebr atory +so t +unci ation +stru ly +mc dowell +deepi ka +remin ders +my stical +ct c +chat ted +s ica +bar gains +ch hat +ru bin +m net 
+oiland gas +pel ican +o at +mor ality +k our +i h +nu clear +gc u +ric her +vene zia +m ma +le ith +ac company +rich mond +sports net +ba ahu +smu ggling +mm i +ðŁĩ®ðŁĩ ª +twi sts +sahi b +.... . +amb itions +il lo +histor ical +fo rec +show biz +pon ies +chas ers +remo del +will ing +prince sses +am ple +cushi ons +ac les +lot r +da ch +an the +in corporate +new bury +ki ri +fried rich +ab v +ball ers +alber t +ðŁij Ń +let i +nan op +ci de +anal o +n sf +)) )) +griffi ths +valen ci +ro ano +fun run +babys itting +ca day +ent re +u ck +slu g +tic al +the sims +ro ar +car ney +g am +sto we +fi d +bun ny +sham rock +pe cu +mol ina +go cougs +con tributes +transform ation +mo y +v aj +sever y +antioxid ants +thir teen +sight seeing +l j +reversi ble +odd ly +hoo kah +nou vel +hal al +fe i +stab les +mul t +ho pped +bra ids +inter change +ghana ian +ww ww +eth no +con junction +ago v +ye ti +earth and +ts p +con serve +heir loom +metaph or +woo f +tor io +self less +n wa +em ilia +yl ene +y xe +gi ar +moder ating +pro bz +b fi +ne er +du mmy +hanuk kah +we bber +k v +eye brow +dag ger +su mp +ra ges +ork ney +tb o +hal sey +assign ments +tr onic +scri b +co on +an war +# âĢİ +jal ape +flori da +qu aid +haw keyes +âĻ¡ âĻ¡ +street car +ro g +dat lantic +gran ola +un changed +expect ation +Ù ĩ +mar lin +gu mmy +ðŁĻı ðŁı¾ +awareness month +oil painting +mu th +per ch +jun to +villa gers +mor g +che ated +web comic +the future +d ps +la kings +men tioning +vo or +ident ities +accor d +mc gu +l pga +rum our +massi vely +m pls +heal y +d ate +sp oli +re visited +on t +al and +scru tiny +lakel and +bl ending +< / +an kara +jami edor +metab olic +f ences +ann y +å ħ +semic on +oo tt +space ship +wack y +le ta +ap ac +she e +in herit +do res +ðŁĩ¨ðŁĩ ¦ +gent e +tw ick +ri ms +gal ve +de ville +king fisher +scorpi o +ow l +al ar +vari an +ðŁĹ ĵ +vene tian +star dust +then orth +q ing +har rington +consul ate +spectac le +ho bbs +tur ks +gre er +mat ing +ðŁİ Ģ +ðŁĮ Ģ +direc ts +í ĭ +pompe o +vo iced +la os +tz u +pro me +pri sm +mer c +fortun ately +bc fc +mcdon nell +not sorry +smi led +t ba +for war +mid term +dar by +we instein +up grading +wol ff +bron co +cab ello +ðŁ¥ ĩ +fi able +shar pe +bat tered +sat o +myth ical +instap ic +pre pped +eni um +e spo +di aper +explan ations +who pping +ragn ar +pe el +antibio tic +l acks +harri son +li sm +au l +qu ail +martin a +sent encing +sc ams +di di +tr onics +ãħł ãħł +go ff +za in +param ore +cha ined +clin ton +li ff +cott ages +em on +reve rend +consu mer +ce an +t any +lum pur +e bay +sto ol +ðŁĺ» ðŁĺ» +ta pro +h ath +modern art +just ine +prover b +app y +tra x +mani fest +am bu +nai k +pe pp +r sd +mer chants +kitch ener +shi fted +li zz +âĺħâĺħ âĺħâĺħ +âĢĶâĢĶâĢĶâĢĶ âĢĶâĢĶâĢĶâĢĶ +uto pia +tom o +ou ted +com ers +chiroprac tic +book club +cin dy +pro hibition +se uss +ë¯ ¼ +thin kin +rr rr +go fund +t ack +om b +catastro phic +ling u +guild ford +bo td +ॠĭ +plan ter +^ ^ +win k +kath mandu +sto ppers +smooth ies +re efs +hin d +bell amy +Ħ ë +waste water +vo or +nat l +! 
] +re el +y ap +scoo by +work space +corin thians +bl un +obli gation +g bbo +dy son +cra vings +ell ington +dap l +wre xham +earthand clouds +uk runchat +positi oned +kal b +four square +jo ck +im pending +even ing +ath y +pro claimed +c ites +ann apolis +san i +mar th +ir l +accom mo +ka a +fin a +y aa +di sper +ec ar +bha k +will y +ðŁĺĢ ðŁĺĢ +mcder mott +mo j +gener ational +u said +train ing +lon ely +lo res +impe cc +âĢ IJ +beav ers +ma ki +he b +aap l +å ı +wolver hampton +leader board +me u +c fa +easter n +hu r +civil war +ou rage +hor ned +le high +awar ds +evi dent +gi gab +r ous +ma del +ro byn +ur gently +k ors +en as +heis man +bam bam +fab ian +f om +evalu ating +assemb ly +out sourcing +hun tsville +ðŁĶ ª +justi fied +cashi er +sp aper +buc keye +analy tical +illumin ati +au tho +o j +sha de +geel ong +wh ey +he aton +terri bly +ele k +un charted +sd live +moto cross +her mes +dar shan +dar lington +cash mere +gri pping +cilan tro +pun ish +... : +ðŁĴ Ħ +inst ance +der i +lo bal +muk her +sp ar +thin ker +fre mont +com piled +color ado +vig ne +sm d +whe ad +villa ge +le ek +formula e +ta res +persist ence +?? ???? +ped ago +he z +alzheim ers +vul ture +off ence +is great +suff ra +kick in +h mmmm +broad way +ï¸ı @ +art i +alli son +endor ses +ry u +lolli pop +soy bean +kend all +cer a +inv ade +( ðŁĵ·: +conver ter +car pets +ho bo +fr it +pe ac +es qu +ern an +ou f +an il +di ffer +ch ing +bre cht +sp g +daven port +stra va +sever n +n gos +stor ians +fe te +parame dic +j hb +al amo +sne aking +gold coast +roof s +isi l +depic ted +projec tions +nu mb +o ss +ep i +glu cose +zid ane +infin iti +íĺ Ħ +ran som +ton ics +fal k +g ler +ou tw +re ss +week ly +the on +n ole +ðŁĩªðŁĩ º +vol ley +sum mar +neg ativity +sam son +ye w +aus votes +ju l +ju dy +f art +pra yed +pal ate +multicul tural +double header +cycl ones +pier re +ãģ ¨ +âĺ łï¸ı +rt w +conver ting +wir ral +l ari +ir relevant +austin mahone +an che +ya an +sd f +$ . 
+explo ding +ulti mate +prof ici +gofund me +cell ence +ep stein +bul lied +sep tic +à® ¤ +lu mber +cu ff +vsco cam +pl or +ภ¥ +se ok +ro to +venezu elan +sor ta +spir ited +daniel padilla +team sisd +radio active +icelan dic +ðŁĴ ¤ +ver e +accommo date +shi pp +ot ter +ol ina +e go +su la +san antonio +de as +simil arities +âļ ¾ +y om +bro ward +å ° +can cun +veri fy +on te +candle light +ìł ķ +inf ants +az am +ðŁĺ ° +le ven +un stable +bloom ington +x ford +con tour +y p +innov ator +histor ies +po y +lolo lol +ex pires +cat alo +bill boards +an ab +el ic +novasco tia +fa ire +ìĿ ´ +rock well +gr ille +az tec +joh or +ur struly +fi ren +dun lop +id le +port man +jo es +tx hsfb +hol m +cham ele +under world +lo ss +ti em +therap ists +past ure +pa ste +ing now +vul can +ra gon +lar kin +o shi +ho co +child hood +umb rel +success or +kath y +iz en +° ï¸ı +share holders +ol ga +ai b +he ap +fl aming +ro u +air tel +rat t +z ane +vo w +thor ough +sn ag +par th +un conscious +ve y +new release +gh ee +croati an +facilit ating +swan son +astor ia +to logy +master y +ðŁ¤ ij +bil bao +trou pe +the ori +chey enne +ro tt +shore line +gra sso +master chef ++ ) +vi x +ellen show +as g +an ak +ku ya +safar ilive +debu ting +blu m +list ener +v ins +book shelf +smart cities +makeyourown lane +; ; +ðŁIJ ¯ +ri zz +on ward +bull dog +bear ish +vir uses +fri gh +lin den +we iser +sn t +gon a +dre sden +fl anders +cu k +wheel ing +ba u +atu esday +surf ers +swi ft +mc call +arbitr ation +aw d +mon c +b ine +at x +re fr +mi ro +po sey +n are +rit ter +âģ ¦ +play book +blow out +sports manship +s oooooo +malay alam +gri ms +bur bank +infin ity +sar gent +oit nb +joseph ine +ski pping +par kin +excur sion +semin ars +jo har +par tridge +post game +ll ll +blan che +temp ting +m na +lu ka +is ers +to ffee +bar ron +he mmings +sa e +go hawks +cu pid +li mbs +con se +un common +z ada +head shot +so ils +pione er +mam ma +sem itic +pan dey +jamiedor nan +spl its +vel a +son i +ra ff +t mobile +âŀ ĸ +pra wns +lit er +enjo yment +egg plant +tu b +cultur al +us ic +suspici on +sy cam +summ ed +ma du +ho ck +up wards +eye ing +ri ve +assas sins +âĤ ¬ +out fy +chi ves +t ner +la is +por ridge +sad dest +w cc +vick i +sna ils +biz italk +mill an +ðŁĮ į +sam oa +j ing +mi key +gu j +chel ms +eli gibility +arma da +thro p +surger ies +ãĤ ¿ +mo hawk +ex its +me m +is lington +c me +land fill +kait lyn +ðŁİ ¼ +combin ations +tomorrow land +ver b +cor a +pre cisely +na om +ðŁĨ ķ +shr ink +sof tly +merce de +mand el +poo dle +ball erina +sop h +jux ta +y at +ary an +hesit ate +lo wered +gu lar +dungeon sand +ron an +my ri +sp f +men opau +gra sp +pa thi +fe asi +fla w +shi story +ste ward +gg le +fay re +cli que +credi bility +yo g +sec tion +mu sko +se ville +no tt +cal m +mate o +indic ted +fi ba +by l +lin o +u kin +!! # +enig ma +siri us +bu sc +ðŁį Ĭ +mac kerel +psal ms +a at +tomorrow spaper +ðŁĺ ĸ +p fc +........ ... +shre k +mul let +o sh +danger ously +immen sely +am ur +ðŁį Ĥ +pro por +sy a +london marathon +abo ve +obli gatory +pro v +ra cha +alex is +pri mary +sh h +ether net +d stv +cou gar +un lucky +ni l +steak house +mel a +fc bayern +cause way +ca therine +fluore scent +nx t +to kyo +au sp +releg ation +qui zz +shored itch +proud tobe +promo s +inter acting +home brew +da esh +w pg +stead ily +provin ces +bal lots +i ah +al to +< << +you u +ri ley +prefe rence +tra verse +incen se +am munition +ho dges +# @ +hail state +tart an +witch craft +vent ilation +liber tarian +! âĢ¦ +ow es +% ! 
+ong chang +bru shing +le ic +fi ber +under attack +down load +ex pir +hy o +pompe y +mc bride +y ag +stre e +com bat +ten ding +ai ra +gug gen +ab ra +in na +fli ps +aw al +m ach +dol lar +inspir ations +z um +o du +it ty +video game +aqu aman +har u +bel fast +je b +but ch +us gs +calcu lus +go yal +mor gen +x finity +stand up +contrac ep +sab re +na be +in secure +gener ously +epit ome +l w +t ca +narr atives +don nell +pand as +ber gh +tu t +ker al +fel icity +br ampton +quinte t +nom ore +ðŁĶ ij +lo i +alham dulil +ðŁĶ¥ ðŁĶĹ +ston er +shaw l +clin ical +bren dan +gon e +fla wed +tri ppy +j g +al location +po aching +ve vo +mo cks +lef tist +bon uses +condem ned +abil ity +st ating +microbi ome +bio logist +for you +wahl berg +ss or +ift ar +w ul +ÑĦ оÑĤ +pom er +me me +ver te +tre ll +tra it +in let +hormon es +deliber ately +vill ar +battle ship +p bl +tw enti +ho kies +dal ail +say a +may fair +han s +die ts +⾨ ⾨ +od in +hot spur +pap i +k ana +k amp +fin na +flo tus +ti ans +unic orns +tribe ca +chang ers +fore ground +out a +inv aders +gett ys +tomorrowspaper stoday +mac millan +hand written +w fp +u de +state of +base d +âĺģ ï¸ı +cas m +psy ched +histor ians +fol d +d da +ag grav +p ans +green way +au sv +ðŁĺ ¶ +shradd ha +inde x +be sti +zim mer +t ness +eye shadow +ot te +go ts +distribu ting +pro min +yo l +ace a +tram rahim +hoo per +supre me +jam min +intu itive +quali fications +sli m +sid di +jay ne +tri pping +g tx +pun s +e manuel +om g +mid summer +in to +succul ent +ri en +new mexico +o or +hoo king +in f +ðŁ¤ Ŀ +flir ting +na hi +g friend +t ps +hel ix +z s +on ie +ct f +kri s +irresi stible +fla p +ðŁijıðŁı» ðŁijıðŁı» +us wnt +ru d +ram ps +pin oy +ot w +lol z +low ering +favor ite +t mc +phra ses +her mi +aver aging +em br +ben o +estu ary +sle eve +ribb ons +ta sh +ภ¹ +x f +aw gs +sun ited +brew eries +anir ud +pun ches +ol die +ip ads +wi fey +land lords +d ji +gun ner +íķ ´ +tex an +ex op +cas sandra +s off +ðŁļ « +igh ton +bak ers +awareness week +v all +ear p +bts bbmas +apologi zes +âļĵ ï¸ı +was ps +states man +snat ch +watch dog +ra fi +after party +spi ke +j er +peri ph +r nc +mu ll +le en +shi es +li eu +urstruly mahesh +mer ton +de sai +shi f +ðŁĮ ± +pe dic +gos ling +arrang ing +ww g +gen y +you uu +netfli x +e ttes +k wi +bernar dino +am iga +Ø ¨ +kashmir i +t ings +emer itus +de cat +ab domin +dc i +pha ses +d jan +be am +op ry +i shed +the ellenshow +the st +habit ats +to ons +mclau ghlin +ri pper +micro biology +tal aga +clu eless +ss u +cro che +bro mance +longe vity +zagre b +prev ented +tra ve +spo ilt +darry l +migra ine +al cat +dd dd +vi v +ser pent +mat tel +jam a +con quest +î Ħ +sam sung +presbyter ian +ket ch +fire fox +mo tif +le c +cho pping +cher no +j ann +ðŁIJ ° +pro lon +wake up +conver gence +mersey side +heart broken +lo oming +hal lucin +mai ze +commun ism +mo h +twitter storians +serge y +res eller +favor able +ed gy +re iter +mal aga +live me +ka hn +pul sion +big g +kim kardashian +ati o +tyr anny +ru ption +q ant +pro ven +by z +pu shaw +kri stin +e er +tar dis +ri z +awak en +mi ko +un documented +path finder +indirec t +resemb les +h ler +conce aled +scand al +re im +d nb +cr itters +attend ant +apprentice ships +aa u +scre amed +l su +fa h +har bour +ed d +bat sman +li ss +mi sha +spani el +it f +advan cement +fa c +close up +cecil ia +medi c +narcis si +lav ish +gi ac +ma ys +le it +wine wednesday +pushaw ard +let to +curren ts +bug atti +out ine +w j +un do +ler osis +devo tional +ðŁij « +on na +fais al +sa una +himach al 
+am ii +à® ® +di zzy +screen writing +ph x +sp n +ick i +ag irl +fi shes +wb z +pi m +bo ar +ac id +! .. +rocke feller +n ga +dra stically +simpli fy +dru mming +autum nal +gur mee +lor de +jo ann +give up +b our +am ura +der land +sim pler +wat son +tri dent +concor dia +bel lum +bre k +dum plings +vi on +dungeonsand dragons +sp ri +ascen sion +wil datlantic +u st +rob ins +legi on +insi st +jar o +gue ss +so b +bigh it +pool side +negoti ating +mc gill +bil d +techn icians +miti gation +ajay devgn +b to +ant en +cosmo politan +ðŁĺĬðŁĺĬ ðŁĺĬðŁĺĬ +patri oti +temp er +promen ade +nav ajo +nam m +wrink les +dc fc +le ach +bru nette +r f +cout inho +al ti +tradition ally +op tome +na z +accord ingly +rec ard +de ets +sw ell +po sure +whit ening +strang er +illi on +here ford +u wu +ro bber +cotsw olds +cl en +gor ge +nam aste +re lish +gri ff +adren aline +bla sio +val e +ê ² +toler ate +rail minindia +jen sen +ho ven +el lu +ob sole +eisen hower +unidenti fied +than niversary +body guard +Ø ¯ +i dge +sch al +stock port +sn i +re taining +po po +pix ie +oli thic +ki er +ha jj +sa z +cor bin +!!!! !!!!!! +v it +me gat +de h +circu it +af fleck +theore tical +hope less +u ab +slu mp +b ice +jam med +let stalk +can i +side ways +labyrin th +re fs +ha hn +jare d +ðŁį ¹ +jam bo +ph yl +enhan cement +c tr +ful lest +se ye +do ba +cho ic +yo s +cb j +andr é +re watch +pri ma +doctr ine +for gets +u hm +ar ound +u le +art lovers +shi raz +har th +ex tor +Å ¡ +unexpec tedly +eli us +y x +em my +se ac +ðŁijĩðŁijĩ ðŁijĩ +correc ted +com bu +wom anc +cou gh +what son +publi shes +divers ity +back bone +lock down +mesmeri zing +nor te +ma b +desig ner +í ģ +ra gh +mole cules +get outside +the beatles +semicon duc +nach o +lun es +ham mers +sul tan +o on +fe ren +att ach +ar qu +uttarak hand +s ash +; - +tre ad +i ko +ar thur +scandin avian +r ation +ga el +charge able +fish y +v ma +hand bags +char a +ay ne +de fam +sett lers +qad ri +pal ais +in wx +apocaly ptic +poo ja +a es +at ories +proof ing +n lp +ts la +v ina +li do +dee phouse +informat ics +v v +pp ings +di ss +à ¯ +uhur u +st ony +betra yed +b aff +my ra +as pen +allow ance +tam ara +ci f +cor bett +ser ge +di go +ambi gu +pain ters +p cr +p ca +nom s +lo ft +ve e +opend ata +ðŁIJ ± +alex andre +identi fies +fantasy football +re production +brom ley +ware agle +mm er +p ss +cu es +ay at +hut chinson +sar ac +jack man +ira h +ap ink +col s +aussi es +ex ecs +day ton +ðŁĻ Ĩ +im v +har am +chuck le +authent icity +ar do +incub ator +ภª +photo shopped +embrac ed +fight for +gor man +zz zz +schol astic +cri sps +te apo +mid night +ga ine +col lier +s ate +de tte +å Ń +imag ine +i ff +tw ili +i fication +teat ro +nor ma +es ur +emergen cies +rise up +r inger +hass le +cait lyn +tranqu il +vers a +se b +over look +gin i +bo go +se re +may ne +henri k +contamin ated +rhapso dy +pro portion +wildatlantic way +âģ© . +organis ers +tran e +stand ard +sper m +laun cher +ric ci +her ts +paper work +showcas ed +mer yl +pen a +p imp +disa strous +^. 
^ +phar a +x is +fron tal +sw irl +sp ills +swag ger +smart watch +sizz ling +savi our +cat ar +bb cr +refurbi shment +dr is +citro en +absor b +patrioti sm +il leg +chro mo +fresh ers +ru s +lim iting +ef ish +down ed +man dir +hazel nut +p all +mac on +disappear ing +quali fies +bo on +bar racks +am ine +gen dere +ðŁļ ĺ +j es +ãĥ Ń +qu ito +middle weight +sch au +quad ru +aci ones +limit less +ðŁijĮ ðŁı½ +ch man +ar av +regulat ors +it up +batter sea +mil ford +g z +tic king +gh ou +cru shes +tu tu +dread ful +fam ine +for change +dalail ama +ðŁĴ į +whit aker +hash mi +h us +vo d +bet te +aa ah +iso o +ðŁ¥ Ī +ha ar +la ine +b v +all day +spr out +indie games +free bie +gree ks +but ler +ill in +ha al +ware ness +si ma +public health +gam a +wa a +oun g +goo oo +okin awa +off enders +im pose +ho c +young ster +story teller +sc ap +figh ter ++ , +whit es +music monday +re za +go ducks +bri a +mi um +cas per +cru mbs +a ad +marti alarts +ch p +ri gged +tn g +harve sted +sa k +do jo +mill wall +b nw +oc d +histor yof +t mr +si rens +fan ci +caregi vers +vir a +son i +recur ring +acknowle dged +ðŁı Ł +oph ile +bu cky +stre ssing +roo k +di gger +vi val +san do +fle et +si ers +sel caday +refre shed +anti fa +a que +po lo +disappear ance +de mb +âĮļ ï¸ı +ren ted +ber ger +g mb +cu la +ss al +goo dy +u hh +marcel o +w anna +soft ware +shop small +turt le +tom as +fri sco +ðŁĺį ðŁĴķ +jim enez +c su +day z +an do +wyn ne +choreo grapher +cerv ical +trail blazers +ed g +zend aya +travel blog +el s +whole some +co g +lab out +ar ney +del le +su isse +ma si +ine se +om be +fi ddle +re claim +pa u +wat cher +sla in +ber ty +opti mum +el ites +min is +tur key +patro ls +ger ard +au reli +wild ly +wal tz +br gy +w ob +cre st ++ ++ +ve z +fro sted +davi do +the x +param edics +p into +han k +du pont +ur g +fo stering +micro poetry +spec tre +---- > +ne uro +fri da +music al +galve ston +e ffic +sc ape +pal azzo +th all +pro visional +p js +au re +ðŁĶ ľ +mam amoo +kit ties +cre e +wa k +lo ool +lu pus +cn blue +à º +ðŁİ ¬ +rac ed +tro se +om as +stri de +co ors +⤠µï¸ı +in comparable +cy ril +broad er +arec lipse +ðŁį Ķ +inter val +ti ru +co working +w aco +a ham +a bee +flouri sh +the times +ol ini +kick boxing +lu cer +at la +as un +casser ole +mi aw +lobb ying +jan ice +cir que +re flex +le ary +sanat omy +tem pest +se mb +mur dering +us av +ro bo +on et +p cc +nati ves +life of +sa ha +ruth less +rel ates +appeti zer +pye ongchang +nor d +er u +a thing +ug ly +pl ying +bran ce +organ ise +kend ra +dat o +chees es +par ma +burn out +a stra +pre toria +adjust ment +uk u +sl o +li ken +fav ors +cli ve +be ets +snow donia +go tv +sy n +open house +pan i +portra yed +sl ated +me cca +ren al +supportsmall streamers +staf fs +da o +bi ker +vik tor +tit us +admi red +ðŁĵ ± +hurric an +he ats +gl ory +photo genic +mer i +de por +burn ham +or angu +dj ing +impre ssionism +ign ition +ca i +w ynn +de pe +cove ted +colla gen +sau s +or nam +administr ators +ss on +nh politics +hahahaha hahahaha +aspir ations +r gb +swol len +so we +sc r +diver gent +hou ghton +han oi +d ory +ni ki +land ry +b cci +ðŁijĮ ðŁijĮ +is mail +tri pod +her d +bhat t +dress age +tab by +ingu ish +hur on +à³ į +à ł +to das +evangel ical +chor ds +st john +slo ppy +marty r +face book +ali ght +sen sei +kath niel +r ites +zi one +u o +revel ations +weight lifting +pan o +nc wx +ac ton +à® ķ +Ø ² +som a +à¸ Ĺ +respec ting +mar che +fore man +be tty +ki k +shi bu +po on +argy le +k swx +et z +mar bella +brac kets +stand by +fire side +defi ance 
+v ex +britanni a +in habit +appo int +piyu sh +le ash +sci ento +fla sk +sen na +> : +at roc +sand erson +id lib +dhan ush +ðŁĺ Ļ +en thr +hit ch +de dly +al ley +dor k +mon do +cudd ly +mis sin +ye sss +night ing +j pn +w ary +ump ire +ma z +ê ³ +bab s +ĭ ãģ +stan ford +posse ssed +exce eded +ðŁĶ ¶ +wall art +tra p +j il +hi bis +sp ying +scri be +khali l +trans lator +lu mb +di zed +ch c +super vision +shut ter +ja g +_ * +yester days +ms f +hi hi +gonz aga +gille spie +vive k +ec static +this morning +ch us +ed es +ston ed +be es +ðŁĩ¹ ðŁĩ +tur in +ho ver +at rics +ster n +sam heughan +auti sm +mi ya +eye witness +writ ings +travel tips +chut ney +px rtg +keny ans +my stic +k rit +/ $ +red head +world ly +am us +op la +le ve +gab bana +se en +o clock +gang a +keen an +sc ent +ol dies +go green +corner stone +comp ly +con cours +ðŁİ¶ ðŁİ¶ +ha an +con fis +aw son +cle op +î Ģ +su zu +sau té +al gar +subscri ber +este emed +ãĤ¤ ãĥ +worth while +mel rose +flo ck +bri ghtly +viol inist +p ere +sli pping +and co +si gh +ha van +cu lo +m sa +fibro sis +matil da +ra fting +aw ard +ë ª +mm mm +ge aux +ste iner +sin n +help ers +beet les +ai mee +tai wan +pistachi o +mac beth +m zan +descend ants +on sale +in r +il m +grou se +sa ig +mo w +bi gre +adjust ments +tu la +mathe w +transl ates +mu h +bol lah +ðŁĴĽ ðŁĴĻ +amo res +ab outs +bomb shell +bla ster +x avi +s ns +k roger +ga ther +erad ic +daf t +chem o +ben ches +ðŁĩ© ðŁĩ +ut v +our a +n ko +gator ade +biaf ra +ok state +im danielpadilla +dom ains +open ingday +kid do +do i +ric e +day care +mac millan +ba thurst +cheer leading +ðŁ¦ ģ +cash back +k won +hob bies +exem pl +ries ling +âļ ª +ag les +ny s +every thing +nav is +ad di +magne sium +faceli ft +ark ham +grand es +extre mist +don at +vit ality +pump kin +be tta +sl td +arti san +li by +pe aked +ah hhhh +mary am +assi m +un sc +ment e +al aya +low ers +ar as +gri ev +le ip +gr ati +cri ses +spr ints +exe cute +w to +ms d +mag ical +re viewer +spark les +juke box +ðŁĺĤ âĿ¤ï¸ı +pay back +licen ses +dun kin +bel t +lake wood +h ateful +bud gets +rev amped +ph erson +ky iv +went worth +ro sen +cru ise +gi ggle +def star +assassin scre +ym outh +win kle +w fc +band wagon +b kk +w iring +kear ney +south side +pe tit +! 
ðŁĺį +nor dic +mir za +mu gabe +v l +scon es +k tv +sand al +du c +m alls +ðŁĴŀ ðŁĴŀ +it c +al ay +im pair +un rest +flo ss +c é +ab ou +var ying +muse o +ser ver +di ya +hibis cus +ero y +mer ritt +fin dom +f pp +un usually +go tt +conting ent +ali aa +ball on +jo l +hi ked +zy me +ay r +ag n +ga z +perio dic +spar ty +practi sing +lin ton +tal is +cy pri +womanin biz +radio disney +ðŁĮ ¼ +jump ers +endo cr +ðŁļ¨ ðŁļ¨ +and on +shar apo +mi er +ma sonic +fac tories +vi en +bb ers +ìĽ IJ +hol d +ke bab +be ak +approach ed +ac milan +mun ro +ko sher +excell ency +negoti ation +walt disneyworld +cr ouch +te asing +suppre ssion +en ya +b ce +transformation tuesday +cal lie +vis was +p gat +ic ted +end ings +esc u +recru ited +it fc +collabor ations +g ino +snu ck +ausch witz +i fc +x ii +ke sha +ger vais +clo ak +x l +sa ad +prob ation +pre cau +mac in +anasta si +le k +e azy +daysof code +mariah carey +yo g +stit ched +boy friends +sh ar +ph ile +ag u +twin kle +phi shing +week ender +ic ton +gurmee tramrahim +al ton +l eness +all an +pen ultimate +kry stal +go u +lan de +dis mant +ab using +nor se +pat erson +ed mun +ap an +xi umin +sk el +cat walk +re act +wal led +t angle +br yn +ve to +super moon +cas ablanc +appreci ates +ski d +bo th +catal ina +ele ague +cyber monday +cau tious +ðŁ¤ ĵ +nov o +hamp ton +ha ye +jose f +var an +lo bos +roano ke +orph ans +tt in +squ ads +ishqba aaz +black panther +e tu +k sh +cru mble +cess na +reli eved +scul ly +pollin ators +explore canada +ki es +kam loops +kir an +pri mal +sett lements +hot spot +brain storming +ce dric +bi ennial +sh ant +âĻ¡âĻ¡ âĻ¡ +do on +hear n +walk way +fe m +ve al +deport ation +tox ins +elimin ating +descen ding +by the +bla sphe +ha sta +comple ment +as cent +ri ga +provo st +âĸ ª +wee ping +anti semitism +employe e +unearth ed +pin o +natali e +bla d +ang ola +lock heed +in ian +ag r +ni ster +im pala +m ke +fan atic +âĺħ âĺħ +ðŁij ¸ +lu ch +simpli fied +gall ery +econom ic +cy borg +con i +sel ma +in ception +ko ala +dv ds +cre sted +m mor +visi ble +n sd +ðŁĻĮ ðŁı½ +w under +refriger ator +re opening +e era +carou sel +as p +balli stic +victor y +mo tive +tre y +sharapo va +si i +mon ter +int end +west chester +sp e +cy mb +vi dal +ll ama +uni v +fin er +crafts manship +jazz fest +b ch +ag gio +n cc +lamb da +tranqu ility +cis co +ba den +so bbing +of i +go ta +ru mored +war med +ore an +ac ton +mar ci +gh ani +âľ ĵ +as sorted +pembro ke +pen elope +da f +at ty +aim o +pretz el +carni val +than os +ko chi +mer sal +ham radio +ar twit +cas c +guer rilla +kush ner +k app +al ise +todd lers +steward ship +o tti +ter ri +tem pe +rest less +vit o +zay ed +rsp b +pi on +hi ppo +haw thorne +in as +am ily +nut cracker +lo p +d ali +tro pic +ðŁ¤ ł +ul o +jare dle +py rene +pale o +usa ir +m ould +it ated +gene tically +biom ass +ðŁĩ³ðŁĩ ± +do dd +practic ed +monarch s +un manned +m buhari +am al +photo gra +ko ol +bren don +ju ices +cu re +world bank +poin ters +ðŁĴ Ŀ +tur f +le ds +bor ussia +bapti sm +warwick shire +moun ts +gay o +be gg +co pied +asi ans +k g +moder nist +gi d +front man +concentr ated +y t +sc avenger +iron ically +adi c +ps n +ðŁ¥ ī +cultur ally +yu v +mac arthur +fertili zer +be withyou +ri gor +min ors +z oning +âĸ ł +ri r +adole scent +vin ny +ren g +sand stone +gu et +we sth +ple dged +lac ed +sp ide +v ai +ty coon +seiz ure +du p +appalach ian +ro k +cathol ics +sey chel +posse ss +la ger +jo di +cham p +stra s +d ina +cent uri +cal der +blur ay +ðŁĩ¨ðŁĩ ³ +mo do +an nette +youtu bers +chap s +ang ling 
+label ing +a qui +pk wy +ly le +bi sexual +lit ur +dug out +li bby +grey sanatomy +sub stances +august us +rall ying +fi del +ing ue +äº º +hallmark channel +tooth brush +m á +adi rond +ag gi +ðŁĵį : +cru sade +tax ation +k z +i ver +dou bling +room ie +wa b +en rolled +az on +a ju +grand children +as df +ðŁ¥ º +mat ic +ough ton +utili ze +ðŁĴ £ +pon der +rais in +dys function +co bain +butter nut +e man +su red +dri an +and friends +with the +on omy +heine ken +bri dal +leader ship +pyram ids +deutsch land +jo cel +bo wel +y qr +horse power +be acon +ing eni +gra dient +fer mented +mo om +thing y +pot assi +wrist band +bor d +bo died +ðŁĺŃ ðŁĺį +ma pp +ka u +cyber punk +ph ish +loo king +co ates +ap ur +am ie +uk labour +at in +g la +adop table +shel by +v illi +ri ya +m ingly +cli mber +bumble bee +ðŁĺ ¸ +c sd +âĿ ¥ +hospit alized +c ki +hat er +ch r +re tina +it a +fan base +beat rice +gwy ne +go ss +fo s +favor ited +swachhb harat +mal ade +mon mouth +" [ +si van +sh hh +command ing +sains burys +wee d +g man +ss w +rep tile +iv y +tro pics +roll ers +over cast +ex position +masquer ade +man crush +wa ist +spr inter +sle et +le vin +j pg +_ ( +o pel +explo it +ap a +po we +wrec king +jong in +or b +er ick +bo sco +pra ising +ber tr +to wing +in security +ku t +resto cked +rr p +prescri bed +trafal gar +per t +g ases +app rais +g har +music als +âĸ¬ âĸ¬ +mc fad +ag ony +conditi on +equi p +shi k +atra vel +ðŁĩ¿ ðŁĩ¦ +ke h +abduc tion +pe oria +wil kins +g ms +as d +ev i +ðŁĴĹ ðŁĴĹðŁĴĹ +u z +mo c +halle lujah +guad alu +lou vre +dra wing +go ve +ph ant +fri e +web dev +program mer +z able +games com +clari fy +li th +kin ky +âĿ £ +labour doorstep +son ata +ju ris +mai den +vi adu +buch arest +conditi oned +capit alist +u de +ps b +sp ca +lul la +footh ills +kay o +bon d +wom b +roun der +ce sar +bur sts +ap ra +sw oon +sab rin +fra grant +cle arer +ku brick +cli max +jour no +ag le +ðŁı½ âĢįâĻĢï¸ı +poo ch +hal e +sol it +sal mon +organis ms +bron son +art en +hodg son +alo ve +vent ure +bb i +ae a +ðŁIJ ¢ +ld n +d nr +o zone +el las +man ny +azz ur +un beat +tru ffles +th ong +ma ñ +las ers +ley e +gettys burg +back packs +or is +ma ison +craw ling +la bra +cl ing +dra gging +ste al +dou bt +de van +ck ers +agent sof +photo bomb +elon musk +abo y +dist ances +story line +sp i +nor than +europe ans +wh ale +ser pent +ðŁļ ² +fi or +tr it +ox o +awar ding +class mate +su fc +smar test +rich es +pr k +big foot +ar mb +bi polar +dw elling +om ars +k wan +gri me +m eng +freder ick +navar ro +sorry notsorry +jaredle to +pa ve +sl ack +barn sley +att ar +evic tion +accumul ation +o ir +cat chy +wel ter +vik as +has see +nik ita +mo yes +mathe ws +shi v +gat wick +pro filing +compan ions +mar rake +an tics +ðŁĻĮðŁĻĮ ðŁĻĮ +se se +bo i +bart lett +poison ous +ab uses +ym m +kam pala +guggen heim +imv kohli +dol om +bre e +thro ttle +gare th +fitz patrick +un ya +par ad +mar got +j nr +we a +potassi um +p nc +disgu ised +cra sh +ren ergy +ill ic +coup led +ni els +ci ones +æĹ ¥ +im ent +despic able +d ye +what cha +conne ctions +paralym pics +gaunt let +wait rose +suici dal +star ship +vap or +st ou +law maker +coo led +si mo +then o +offro ad +ja den +bas que +vick y +lu kaku +centr o +tri sh +strate gist +medic ations +hor st +b fc +gra il +sharp ly +ad itya +tom b +kau fman +tri pad +sam ba +pastor al +brit ney +sag an +hill side +mas ons +sar a +z one +x u +to tes +rob bie +app en +mon tag +der o +short film +charis matic +tat ors +ki ba +and ri +al arming +split ting +ic ar +th ug +scari est 
+sylve ster +an an +u trecht +a difference +me ade +bu ster +air strikes +cu ffs +account ants +ðŁĺ¡ ðŁĺ¡ +new t +bo tt +issu ing +cl ancy +wwen etwork +kyu hyun +rese mble +pajam as +sin k +kin ney +sul ph +or k +li es +la gh +or ton +ra hul +d sc +we will +re am +collo qui +shar ia +hec tic +sar casm +land er +tm z +endor f +ro z +ham mered +fri s +w adi +pope francis +he it +flash light +un born +op es +hol iness +ðŁIJ ¦ +nach t +im sa +gr acing +bj p +ver ts +c sc +home owner +a que +bigo try +anni e +bag h +âĿ¤ï¸ı ðŁĺį +car i +thom p +dispo sable +cardio logy +pat ented +hh hhhh +ld r +stephen son +cro res +fan ning +cli mat +ðŁijį ðŁijįðŁijį +ðŁijį ðŁı¼ +aer on +piccad illy +bank rupt +sil via +emplo y +don ny +commen ting +screen writer +io ta +ce an +anc ers +tu an +street wear +ठ¯ +sk ine +esp a +asi f +os ce +she ppard +more cam +bott le +der s +orac le +google play +aver aged +edmon ton +steph an +sister hood +cru sted +stag gering +methodo logy +congress woman +c abo +tri ggers +mil ky +gli de +tooth paste +room mates +nu ff +gu am +sprink les +alternati ve +wat fordfc +uof t +hal ey +cont acted +bun dy +pro stitu +gh ar +pre ston +on site +hil ar +g ts +c att +hamp stead +? ?! +ðŁĩ§ ðŁĩ +bbc qt +aless andro +resi st +ma idan +t ko +shad ing +pin up +gal lo +sin u +at ec +fun k +ac lu +stri des +rhy me +wet land +bbc springwatch +t ins +wild card +st our +flamen co +pau la +onto logy +gang sta +am ade +ãĤ « +t bs +skelet al +run ner +jard in +harri er +hun ted +z hen +believein film +de mean +au diti +re start +chon dri +âĿ¤ï¸ı ðŁĴĻ +mcla ren +ga b +sh um +au sa +lewi sham +y pg +k jv +fur nished +dor o +bon ded +mor ty +lat itude +_ ) +lo va +water ways +vin ai +shor th +drun k +c ay +ay ana +kap lan +capp uccino +spr o +life boat +has bro +spol ice +tor on +do ing +dam n +sh ree +foun tains +ent ation +mar u +boar der +to pless +j ada +chan ning +ul ls +en closure +gib son +fractu red +brit ton +à ¶ +t ous +por th +dra f +tra iling +mar gate +eli fe +down ward +lin n +gla des +girl power +ak rish +u ki +ron da +ts c +appreci ationday +vis ing +lo om +ðŁį ³ +mex ican +ar gos +y ya +jad ine +south port +d end +si sta +rede em +men g +bra xton +antioxid ant +s key +mp g +fin ding +vibr ation +ce u +kh art +di mini +cl ine +shel ly +hin es +ī ï¸ı +to pical +no ver +ma xx +prim itive +illustr ate +b ounds +tren ton +join tly +breed ers +u chi +wakeup america +b ada +ðŁĹ £ï¸ı +gu acam +sp heres +pere gr +youth ful +lo lo +bir min +t ly +jeremy corbyn +defe cts +co sm +a rent +v aa +bag els +medi ac +cori ander +ic ago +g haz +ab bas +re model +struc turing +pu m +out law +ad ani +r bc +gul ls +n li +confu se +ðŁijĩ ðŁı¼ +vil a +mcnam ara +correc tions +mug hal +ser i +re gain +ss b +lea ve +haha hah +gran de +di stressed +re chargeable +ho a +hou sed +sti l +attribu ted +opath ic +di ps +pri t +head phone +conclu de +pil o +he t +ut sa +nit in +je m +sni ppet +tutor ing +op er +sun k +en sla +cha u +ac orn +quinte ss +ran kin +affili ated +our lives +cl int +se ater +isa ac +ba shing +sme ar +nur se +doo dling +" ; +sa ku +atroc ities +im am +g fs +viol ating +comm end +brad shaw +er ville +b illed +b be +thul hu +i phones +moo se +di os +re w +me thane +strang ely +whis ky +ti ghtly +spiel berg +radi us +notic ing +wi f +ig nati +i fa +ap is +w ali +ha itian +bu shes +y z +v l +ex ited +asse l +tru ec +dom en +ash er +in king +newyear seve +hend ricks +bat i +ìĿ´ ì +rich ter +mon santo +con line +agre at +ðŁ¤ ¯ +master pieces +ar n +rough s +cle ve +se v +fashi ons +to ya +sh ail 
+cop eland +aqu ari +dec als +are you +y aya +a str +fon t +ml m +ar ca +pp or +pol lock +xper ia +conserv ation +chain saw +ag gie +?! ?!? +si le +sh on +ìĹ IJ +note books +marque tte +de us +bb led +spic er +mc cabe +nor wich +modi fication +boo sted +stru m +sales man +bang le +nis san +hez bollah +brea sts +a af +anth us +sk er +ow ed +her os +gi fs +fo sters +eat ers +du es +_ / +lymph oma +sf am +me gal +afri di +ag ic +p amp +jeal ousy +ðŁijĮ ðŁı¼ +calcul ate +napp ing +g ale +ðŁ¦ Ħ +lub bock +assu med +ren ting +íĥ ľ +subur b +ãĤ · +tech nic +u cla +in front +gar net +ster oids +stri ving +ho war +mo ver +le ton +bull do +is in +ci ao +sn z +fore front +d ams +mid wife +ma wards +cla pton +we in +subsi dies +spr oud +rother ham +phan tom +ar ach +spi el +rac ket +sel amat +no on +l bc +enti ally +ðŁĴ ¸ +sil ve +m oud +kine tic +y asi +ðŁİ © +o ol +mi ku +i za +fer a +flo ren +barber shop +groo t +z est +ne ars +stan is +z and +police man +juris dic +form ations +appar atus +sp d +arti fact +to sc +motiv ating +womanc rush +re dro +diagno stics +ra za +out fitters +el xn +dod gy +ry n +sh d +ortho don +ol de +jay anti +bal ances +quic kest +can ton +friday reads +! * +na a +a ak +ðŁĶ · +behavi ors +rasp berries +ä » +polit ical +cam il +å ľ +di k +ast ounding +lie be +novel ty +tur moil +sul ly +spring break +hon ouring +cc g +ðŁı Ĵ +my little +ky c +pro ms +ðŁķ Ĭ +à ¨ +bi ge +av ril +ðŁĩµðŁĩ ° +mari on +as ants +sur ya +oc tag +luf than +ac ron +fayette ville +ti que +love s +en ca +de kalb +ta ver +de vote +aux iliary +joh annes +tread mill +ay an +qu r +donald son +cher yl +" .... +s ven +kir sty +gun ners +ra dish +o ahu +v sky +i ble +con course +b ps +elo qu +ash ford +te bow +roblo x +ma da +dri ving +th day +spro ject +m ms +band ed +. !! +libr arians +flan nel +intoler ance +her al +ç µ +neme sis +list a +tar ak +cry pt +star plus +vish nu +sc ale +cr is +% ), +j illian +regg ae +pegas us +ol in +ip ment +man ic +l fc +godd ard +ite am +parl our +anch ors +lee minho +talla hassee +ant it +d ho +kid ney +y ash +batt led +az ad +gar is +faul kner +sni ff +papar azzi +ed m +phy llis +con tested +aa ay +se ca +k ton +vel ve +rain ier +for um +tam pab +ho sp +trac tors +ox fordshire +no tion +guang zhou +ðŁĺ ¯ +ref ill +wednesday motivation +sli der +mukher jee +pr att +fon taine +alph on +af ar +ts i +pest icides +fi ends +mo cking +bra w +tran sat +do ses +co res +hom ophobia +docu menting +zlat an +con doms +s é +sun set +kun st +ton ga +ภª +v ation +sp ray +chow der +ra ps +palla dium +nor wood +music history +hoo ker +si si +osp rey +ph ys +conce ded +bob cat +ar mad +ze it +Ù Ħ +ðŁĺģ ðŁĺģ +mer idi +ðŁĩ· ðŁĩº +corn wall +! ), +touch downs +ze it +chal et +mm m +al che +gor illa +fo ss +ati ku +lumin ous +ivan ka +be ek +sta res +sw iss +âĿ¤âĿ¤ âĿ¤âĿ¤ +scru bs +me ath +gusta v +jo gging +confe tti +as os +ers fc +breit bart +applic able +autho red +ya ho +h in +displac ement +j v +ðŁĮ¹ ðŁĮ¹ +ot c +non profits +diec ast +gu sto +inte stin +c ages +me en +lu kas +moon ey +ðŁĺ · +very day +tor ah +is sion +wa c +lever aging +ish able +cu se +le wood +may an +turn table +ju ice +tru sty +tu p +eti quette +supervis ors +stu n +gu zman +confe ren +ric o +fe ast +back ward +pol aris +mic he +jo g +h ing +field house +vel ing +sho cker +esc ence +ठ¾ +vi be +anasta sia +mar ched +kill ing +Ķ ë +fe tt +exop lan +... 
( +snow day +lo h +ir ani +la khs +del a +po caly +boom ers +dictat orship +ac er +tur keys +quarter final +muskete ers +ðŁĴĽ ðŁĴļ +sf x +museum week +sc ala +ri sis +( ðŁĵ· +ãĢ Ĥ +z ies +bo eh +hu es +lu sci +dol a +impeach trump +roo d +don caster +tor re +hero es +fo yer +tar i +blur red +ke w +frank ly +dro id +ap al +Ð ¼ +y af +bre t +par agu +cac ao +ðŁĻĮ ðŁı¾ +ru e +head aches +shaw ty +char ley +pal er +go wns +correc tional +ðŁĺ© ðŁĺ© +breaking bad +ol ing +da p +endeav our +cit adel +tra d +incumb ent +medit ate +foo ted +ðŁĴ µ +shab bat +dayof the +wil lem +gal way +to red +marri age +f illion +sleeve less +aud itor +jin young +invin cible +kad una +a and +volcan oes +mon eti +indie gogo +buccane ers +ðŁijī ðŁı½ +ãĢ Ĥ +lay ton +cuck oo +hu mber +buzz er +Ï ī +to re +stra ins +sto m +pa ine +s we +du ff +z ou +si mi +li pp +ur n +se agu +ðŁĶ ® +sun dae +hi c +ðŁĺ ¨ +bull pen +u per +flyo ver +al dridge +glo bes +ali es +ken zie +ge es +y cle +sp lin +mag enta +j ha +bal u +gh orn +ti pper +wick er +taste of +con clave +ch ale +inv asi +cat er +dio xide +me gab +win n +at p +transform ative +nest led +hi g +bri dging +lil ies +chee red +bad dest +sc rolls +real is +dipl o +ðŁĶ « +conce ssion +prefe rences +explo des +er gon +introduc tory +ine au +ch af +som es +land rover +spir ation +sex y +sco recard +illustr ates +soul mate +wi en +inter disciplinary +fore casting +ent ities +glu ed +en lar +cur t +percep tions +boot leg +mi re +asho k +v az +hor ne +cal le +ac ulture +ther oy +night time +oc al +character design +ar mist +ðŁĺı ðŁĺı +yah oo +ac eae +to se +even to +sou t +nay anth +wh om +v are +ri gging +gen us +hi ve +com mands +sti e +day a +ethan ol +en f +hi fi +flu ence +cle mson +re invent +thermom eter +humor ous +emer ging +aci ón +ðŁĺĺ ðŁĺį +s ity +haw ke +accompan ying +t ility +ðŁĺ ª +re cess +protag onist +l ery +dun dal +int l +britt any +q bs +off the +marri ages +how to +viol ated +adel aide +wit t +lanc er +pak v +hu me +st ade +bra gging +ou tright +ad c +super st +real time +cu res +garden ers +ero ck +dale jr +ver o +bar tol +mo ti +mc fly +v pn +st ink +over rated +guer ra +e tis +ath ome +twd family +th ab +tn x +rafa el +family travel +x ley +sat anic +equ ations +ru dy +wal dorf +stan i +tu be +meas les +zimmer man +obli gations +i ously +bow ser +trans former +sho ppe +shak en +gh ouse +to d +ke tball +share holder +mar ca +kp mg +ak an +given chy +coast al +au th +roller coaster +mar ches +coordin ate +cine ma +apprentic es +par lor +mit o +men on +consider able +bar re +glo ss +enh ances +jaz eera +fal mouth +thra sh +stat en +k zn +eng el +samanth ap +flo ppy +sal om +ðŁıĨ ðŁıĨ +w ack +deliber ate +osc ill +herit ag +du sted +orni thology +pad dle +fer ns +bar un +cl ans +anticip ate +a ay +mat ically +é ĩ +tu mble +post man +unic ef +tro tter +op d +leaf let +ge ist +cease fire +scre ws +cre ation +wal nuts +longh orns +under statement +ab b +proxim ity +na x +un ity +turn pike +orda ined +dub step +chak ra +me ch +love her +look alike +donne in +vir on +Ù Ī +bang ers +vari ants +out dated +in ta +cri sto +sp elt +food and +f on +stefan i +margin al +hu tton +ti ara +tel ford +qu en +fair grounds +que tta +mikha il +heal er +v ball +ty re +under grad +gl end +hom ers +scri bed +main tains +po che +mis sal +mar ko +u as +á n +sh p +con vey +pad re +sab a +pu glia +madhu ri +pa xton +chap lain +n ago +ca si +... !!! 
+fli rt +sal eh +k are +di re +stam ped +extre me +ðŁĺĥ ðŁĺĥ +ho ppy +guadalu pe +advant aged +eu char +p low +un n +mac qu +port land +cla sh +pe s +lou bout +y p +keep ing +arca dia +fran kie +fi u +de th +encyclo pedia +si ze +inve sts +ðŁį © +geo logical +fran ç +con front +ðŁĺ ¥ +d ys +af m +tex an +graph ene +repost app +ac f +ur sula +gaz a +dd led +fu m +wsb tv +m be +fron tiers +chrono graph +ke s +inter faith +tab oo +spar ta +won do +flori st +em braces +ca w +no el +arch ers +ðŁIJ · +roman o +ban an +sh akers +melo dies +geo thermal +se phora +ìļ ° +оР´ +pro c +hand shake +pan de +popul ated +slow down +hor tons +registr ations +un deni +lan ts +pas sover +thak ur +li ef +adhe sive +pe tal +micro scopy +memph is +confir ming +air drop +mesm er +perce ived +ming le +lifel ine +gh j +worcester shire +pas sions +ach er +el lar +ah o +firen ze +bar ang +letter man +hat field +lu cha +je ter +e shop +william s +horo scope +pre de +east bourne +dur ga +di version +al trin +seis mic +premi osm +nar co +ti r +ori g +or m +land fall +ci ous +lin do +max ine +x ico +tra y +os wald +c ba +ric otta +n cr +mar au +ภ² +gladi ator +ch ery +lun g +u me +po psic +lon ging +can als +ta ya +decentr alized +sho pp +pres sures +mahar aj +eti had +wal greens +succe ssion +sign aling +li g +staf fer +north korea +def ying +as ma +de g +peri meter +oak ville +m sk +balti more +rece ip +de ple +ðŁĺŃ ðŁĺĤ +jambo ree +> .< +rsp b +puni sher +consider ably +in tothe +pari sian +acceler ated +polye ster +low es +fr ying +sauté ed +mou ths +seychel les +ra x +go dis +dak ota +house wives +the me +mat inee +black bird +ye sung +pre fers +pelle gr +in ated +trun ks +stronger together +re pet +re pairing +ped als +toler ant +her r +dun ne +indic ation +decat ur +b tv +exhibit ors +ik on +friday motivation +bra gg +live tweet +al ves +womens art +foreig ners +wal lets +min dy +lan ey +bb in +tv miaw +lif ter +tar get +tam e +dr ou +astro photography +mp c +g pu +nord strom +fric tion +run off +lov able +sp nfamily +ext ingui +bloo dy +sch el +arti stry +sw ish +scar ce +ph ils +max im +pos sum +com promised +sty li +sc fc +is sa +birmin gham +sket ched +angel ica +ordin ance +je ts +conqu er +ðŁĺ IJ +online shopping +s ori +reason ably +nue stro +ar turo +ch l +benef ici +spho to +wel t +ni kk +ðŁ¤ ŀ +dan ao +for mid +as se +af irst +âľ Ĥ +gil lette +as sor +an onym +sel ca +fe mi +bear able +y and +ar mory +cre pe +celtic fc +bra vo +in expensive +de lec +ge cko +new market +snow flakes +kab ir +con tra +can ning +mor pho +gar wal +ðŁĴĥ ðŁı» +fight ing +mu tation +woo dy +ju gg +gr aces +premiosm tvmiaw +kenne dy +gu p +sa e +op ha +off spring +fini sher +bet ts +span ning +mar j +h one +sh ing +contin ents +samanthap rabhu +un related +l acy +explo sions +benjam in +sophi e +no ting +micro soft +as sen +a hoy +i ker +ho fer +mo e +ah madi +yan n +an ak +ma hi +be u +aha h +creep er +baahu bali +am at +pri ory +haw keye +deloit te +sko da +print making +assemb ling +mirac ulous +no ch +sw o +leg a +oper ates +border lands +eli e +stron gh +rep tiles +pir ate +un fold + ¯ +qual comm +un predictable +ot r +rose wood +direc tional +counsel ors +corn ell +liber ated +j ad +ir regular +bulgar ian +high ness +vodaf one +sw ild +mini mize +gra zie +๠ĩ +r stats +stre ep +ome tric +humb le +lu mp +l ille +b ü +home depot +tripad visor +ki wan +a via +er z +ex ico +du f +blu men +mi zing +ar ma +in im +con stan +sor a +ju al +au n +tw ell +tren ches +her a +r k +po plar +recipe oftheday +ll an +bhu ban +short ages +ing 
don +bridge water +ðŁIJ ĺ +fortn ite +cam den +un cture +pro w +colon ies +t ks +n go +b hm +live pd +spl ace +sli ke +happye aster +ter rence +revol ver +j ed +yy yy +office of +m ts +exist ential +r ourke +explore bc +sse d +pri est +vix en +si ding +k pa +a har +ju ic +ob struc +foren sics +uk mfg +cancell ation +we ary +ab q +ele c +pri zed +deb ts +me zz +salv atore +m dc +gre tte +c gc +th on +snow storm +ts ch +cook ery +å ¹ +wa xing +n acional +mur s +ra ve +cap es +ger main +dri pping +sub mitting +ome lette +iter ation +aj es +shim mer +fu eling +ðŁĩ§ ðŁĩª +li po +bo bble +un follow +islam ist +hi ber +cat s +agentsof shield +sen si +____ _ +ster ia +inst al +ausp icious +har row +over land +femini sts +inst ant +char iot +blind ness +sp ed +sc arec +nu it +mini atures +ho seok +glo ck +fifa worldcup +e te +dis m +we iner +ex foli +ear ts +ภĶ +my art +man il +iss ant +form a +in cu +buffal ob +in tim +mc cul +anj ali +po po +un doub +hil a +fun gal +thank ful +fu tur +en dish +ren ds +th ar +she ff +ring o +nichol ls +io wa +po tom +cl ams +ãģ Ħ +acon f +stadi ums +di mp +di k +residen ces +do v +caric ature +seagu ll +kl m +confe ss +sla pped +cele b +turb ines +pp v +nur ture +el ab +.... .# +tu ff +de press +al far +amii bo +di spon +e wing +que er +friend s +for re +âĺ ¼ +sw t +aqu arius +head liner +cur d +fi gs +o tters +love fl +kare em +go vegan +fri yay +consol ation +at ri +ì§ Ħ +âĺĿ ï¸ı +poly ne +gu ed +o ya +la us +intestin al +cam illa +scal p +pi r +leed s +horri fying +bore tum +dand elion +fer rer +ell ic +as x +so ren +re loaded +ale ague +navig ator +ine tte +add ams +al chemist +ak shay +dystop ian +awe c +n aya +al isa +ai led +ag or +avi ator +ali zer +smo bile +findyour park +cop ying +to ddy +sh ti +mon ger +cal houn +nap kin +break up +y atra +se thu +ric hi +eras mus +fer ry +am ore +prac tise +bo bo +power point +oo se +li ffe +chin a +sh ka +fad navis +du ane +war on +fal se +ðŁļ Ĥ +wa shes +disc ip +==== ==== +g k +ab b +stub born +medi eval +p ci +ðŁį ª +maril yn +h yo +man di +cr i +prede cess +continu ation +om usic +s lat +wh al +mall ory +bon n +shen zhen +ca i +âĺ ĥ +sa fest +for wards +dra wers +bla sted +sle e +mor phe +mb ta +dumb ass +ÑĦоÑĤ о +alhamdulil lah +ec lub +al beit +heal ey +ayurve da +adverti sed +cro cs +itt les +bry son +be i +nj pw +honore e +fu sed +ðŁĶ ĺ +mul tin +n aga +de parts +ko p +kin o +jhar khand +ed na +ax le +mil ton +supremac ist +marrake ch +domin ic +tran script +] [# +: ). +wo c +sur rounds +o gil +leaf lets +co well +whe w +tru de +proli fer +succe s +sports man +con dom +po che +k up +imprison ment +{ } +scram bled +å Ľ +ka ine +cell phone +metam or +con i +remn ants +ee z +down pour +afterno on +exerc ising +ber ser +architec ture +wick low +m ns +is p +bo c +n iss +mn wild +stu mble +r si +lu ffy +sil en +dd ad +bul lies +haw ker +bb cc +scu ba +e pp +que ts +for aging +pal let +ha di +cinemato grapher +cat chers +to aster +k hi +lite coin +kid lit +amher st +maur icio +ip ad +mar malade +fe y +don nelly +g to +est as +cere bral +ant grasso +zz led +vir gil +swa pped +ðŁĺħ ðŁĺħ +no dapl +greate st +nhl bruins +fra ser +b mo +ane w +. 
âĿ¤ï¸ı +se gregation +remark ably +mccor mick +lo gger +er as +contrac ting +âłĢ âłĢ +yor ks +uku lele +touch screen +de cked +ben n +south wark +ra vin +nu mis +ðŁ¤ Ļ +ru t +gre co +eth ic +red neck +ar r +t cs +ih ri +ðŁĩ« ðŁĩ· +l k +inher ited +zy k +viadu ct +marty red +hi gu +ss n +be in +street style +fer gie +bank of +æĹ ¥ +stake holder +exempl ary +cre ss +ess a +ero tica +intre pid +gom es +bra un +bethan y +bang tan +pulmon ary +m illing +doctor ate +trump russia +ठ° +s ani +bl att +pla u +depri ved +t le +ful ly +bour n +st ak +lufthan sa +kio sk +far oo +def y +bad an +ðŁĺĺ âĿ¤ï¸ı +rit z +tri sha +ran ds +middle sex +arab s +pro j +sport scenter +repe ats +iv f +bleed blue +as sure +o bs +territ orial +ele n +bever ley +ann ah +âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ıâĿ¤ï¸ı +z l +for good +science fiction +gla u +son ya +pri th +st weets +mix ers +mari o +ant elope +writing community +went z +den ham +be di +sf o +harley davidson +look book +immuno therapy +or phe +es ville +ed ged +tas k +sb ball +corro sion +kilom eters +co sting +play back +ke ke +di visi +u ter +re location +yel led +pen g +up beat +ser ve +âļ ł +hal en +stir ring +reh man +en v +schu macher +frag ment +alkal ine +sb k +resil i +share point +rol lover +tra sh +counter part +âĻ « +ob itu +à ½ +ãĤ ¹ +mul berry +ðŁİ Ĩ +auton omy +spra ying +nat l +love you +fran ki +nu k +esc ar +can teen +ali baba +de plor +mole cule +pu d +fort night +blon die +sp hin +portra yal +ta che +bu te +consi sting +freep alestine +c sp +im mort +d ns +ðŁĴ¥ ðŁĴ¥ +tour de +coo king +archi val +ga thers +bit t +b anc +pre mature +snow ball +poetry day +lou dly +fug itive +ed ay +em ra +ðŁĩ¸ ðŁĩª +sci en +node js +jur gen +je ong +band ana +un is +fox sports +v andy +pro visions +wee p +tu k +i ko +h oun +zig gy +z r +fil let +bat a +tin k +con e +we want +k ilo +hor ace +sl t +sc t +stay tuned +victor ia +umb ria +att acker +ingham shire +fright ening +no ir +fr at +con tempt +lia ison +ho i +br ink +tr ill +ni agar +kick ass +dun das +not my +rho de +bu mble +no xi +fa g +spec tators +mancrush monday +jin ping +distr act +dais y +wal den +portra it +ar thistory +vol tron +ev el +is c +ac m +r ite +na o +de ported +swe ats +ru fus +lo bo +labor day +gam o +ihri thik +bl it +abdomin al +ãħ¤ãħ¤ ãħ¤ãħ¤ +i it +e q +bu sy +allu arjun +un disclosed +de ton +pro create +ki l +ðŁİĤ ðŁİĤ +mitch ell +ki i +inherit ance +al p +jo burg +pat rolling +compul sory +un signed +ni am +l ga +eshop suk +tr illi +ma w +appreci ating +rock ab +mañ ana +an tal +mal vern +roy o +grand prix +sut ton +go ftheday +dig i +ãħĭãħĭ ãħĭãħĭ +t les +varan asi +erec ted +discip les +cont act +ðŁĺ µ +li d +⬠ĩ +scen tre +radi ator +ing tips +trans itions +thursday motivation +chem ical +separ ati +sal is +mi m +geo graphical +book fest +/ . 
+âľ ĭ +v ae +cur rie +ag garwal +acceler ation +the ses +lg m +u mass +pro portions +nat a +ani ans +ku ch +be acons +ap r +@ # +ðŁĴª ðŁı¾ +nu ke +sher aton +ki o +ma kati +polit ico +mor ale +ì Ļ +econom ically +gg ly +ss en +pa stries +intern ships +vic ente +fanta ken +aveng ers +accu se +slee pover +indic ated +the dream +ster one +ren ders +fro st +ou i +gre gg +d ore +⾨ ⾨⾨ +pu gs +sat y +nu mb +hems worth +tam i +la ssic +schi ff +igle sias +ag awa +] " +re shi +game stop +divor ced +theat er +clau di +un conventional +prophe ts +ac in +twel f +tow ering +t ml +sc lerosis +k wan +ge ts +distur b +na ira +ener g +pir acy +pru itt +noti fied +hen na +bra m +ground water +bl s +opti mis +$ ) +luci e +biz hour +fang irling +gr ills +or l +ver se +c ina +law less +artistson twitter +tele vised +marshmal lows +radio head +bar r +m fc +bre vi +mmor pg +g aya +âĸ « +sub titles +j t +disney land +to bago +nh m +groo ve +fi awec +" / +ba o +scra bble +om ni +ff l +um c +si mba +ali er +ter rell +plu me +mi di +dig nit +co c +bru t +ad ata +alche my +d sm +ðŁĺĨ ðŁĺĨ +win try +spa res +cu er +conclu sions +to ys +od or +fl ann +gar vey +scrip tions +inspec tions +cat ap +ang lo +st louis +heim er +at ay +tr ich +en yc +chil ds +vent il +mont p +guiller mo +circu lare +z ell +mode led +craf tsman +al ina +stimul ation +cashe w +ju das +best of +to ire +susp ends +scol lege +real ising +by tes +bloo ds +as si +ðŁĴ ¿ +o hs +ðŁį ĭ +scallo p +ठµ +gi fting +camo gie +wil kes +o zzy +ðŁ¤ ¤ +ver onic +sav oy +deme tri +baby girl +ðŁĺį ðŁĺŃ +so x +cly de +induc tee +count down +self care +ठľ +vi ka +tor re +phd chat +pe ars +aw h +suff rage +le sn +admir ation +mp p +shark week +schul z +santor ini +clo ver +( * +stras bourg +ex iting +so yu +finger print +che a +ãĢ ľ +vin dic +song writers +so a +prou der +nam a += )) +simple st +delici ously +gil les +u q +mn wx +ep p +sh un +ken nel +fall on +ðŁIJ £ +sin d +tra gically +out es +modern ism +co ke +gy n +spi on +âĺ¹ ï¸ı +le am +compress or +apolog ise +twent yon +fan atics +âĻ » +sco tsman +sa wa +ko u +as er +ภļ +welter weight +phen om +twick enham +stri a +p out +ka z +gi am +cd p +ho y +emplo y +red mond +ภĦภ+sm ere +trance family +proto cols +pie ce +lu iz +iter acy +carl s +united states +har med +phd life +ch aw +foot prints +l é +cho ker +z ana +sli pper +eric sson +insul ting +articho ke +advis ing +acquis itions +op or +mut ations +re ar +ॠģ +pod cast +wi ther +kun g +íĺ ¸ +win slow +di apers +ðŁĵ¸ @ +ec ker +col lar +hu ey +gi ro +mono gram +kas ich +si veness +malay si +arom atic +gre s +gali leo +u ji +rob b +dr m +none theless +as a +: > +lo a +l np +at work +ag t +laksh mi +pipel ines +id al +stre l +re all +chain z +stone wall +san sk +ðŁı ´ +pied mont +hoste ss +ci u +t é +analy ses +wil helm +scott y +rw by +mosqu it +use mb +qu ins +ðŁij İ +tu cker +s conf +speci fications +psychi atry +broo kes +s ils +ol af +de to +co di +cli p +fil th +womancrush wednesday +go to +ang erous +be ale +w tc +paneli st +ne x +lar sen +emili o +tab leau +h itters +conce ived +americ ani +or tega +mar di +Ñ ĥ +pain tball +thir sty +new yorker +etis ation +go ss +we aker +u gh +tro ll +har ga +du al +ght ning +at ine +ðŁĺİ ðŁĺİðŁĺİ +cook out +pyrene es +po ss +authent ication +sports wear +yun ho +kir o +archi pel +shen ko +ren der +nov ation +divin ity +ðŁij £ +su fi +humb ling +ge opol +devote es +wait ress +tr ough +py ro +i ba +bl ing +gra f +epilo ts +bt r +of tball +bas king +domin os +so om +r ath +sher yl +qu el +astronom ical +wel d +track list 
+sig nee +slee pless +com man +ch ron +summ on +pure michigan +cri spr +sli p +la gi +ra q +um u +thal ap +char med +scru mp +quad copter +ski p +peter sen +mun i +ðŁĮ ¾ +mon aghan +tra ys +ick ed +canad aday +te gr +ï¿ ½ +hot ness +heavy metal +ab ar +gop debate +az ul +spider man +sun flowers +ľ ë +web comics +bar d +Ð ² +nichol as +slu sh +ram an +mark ham +ffici al +ff ler +íĬ ¸ +ple ss +anush ka +to to +sk aters +pro wrestling +compet es +ay ala +myster y +thr ills +mp g +independ ently +y ul +imper ative +formid able +tire less +st acking +ton gues +mal tese +pot ts +mat ti +char ting +chill out +super nova +ome o +sky sports +nu tty +ðŁĹĵ ï¸ı +ro han +insp ired +concier ge +ser ra +ma kk +gal at +chi pp +ye v +ì £ +reim bur +op ul +kimber ley +i eee +bre men +ch itec +or in +nak u +bon kers +foo ty +emer gence +ðŁĨ ĺ +sti p +serge i +zo ey +ai me +wou ld +dy es +destin y +vinai grette +dri er +circulare conomy +an archi +ss r +sch el +cin er +gro om +determin ing +gar min +cal ais +incarcer ation +bu kit +no i +chelms ford +mckin ley +chi pped +belong ed +tu mors +str oud +mi i +influen za +wwen xt +tun dra +tele communications +cat sofinstagram +t ages +beat ty +o du +ml kday +oo per +dang le +ak ley +cru mb +anti gua +ti mbers +rou hani +ðŁĴª ðŁĴªðŁĴª +ha fi +... !! +w cs +coo p +sn c +lit res +ãĢ Ĭ +ha z +co z +k ant +green field +cur ti +y ale +flye agles +what soever +wor thing +rou lette +flyeagles fly +un da +a inted +stand ing +lusci ous +h pc +effic acy +ash land +me ghan +ky wx +n pr +bath tub +ac os +h ani +mar cor +man tis +da isi +bo ba +ab bie +mu til +vi al +spy der +po z +g ti +el fie +nigh tw +metro id +anton i +mad die +dh ry +dar lings +ten ds +taek wondo +atlan ta +me ow +chlo e +ãĥ İ +ym es +siber ia +k con +gu es +mar iner +fac il +azz le +[ ... +han nover +bav aria +vir go +te uk +u sps +) # +wall a +sam pson +need less +ver bally +hay ley +bow led +pi us +lam pard +ham string +vol vo +road safety +cho king +sor bet +a hem +healthy food +brai ded +horticul ture +cr ative +che ek +ad do +the force +ko ko +schiz oph +j ie +w ada +twentyon epilots +h bcu +pro ton +pau ls +lou isa +lat am +kyr gy +com pac +sd k +sap i +?? ? 
+liber alism +ep silon +ai den +w usa +spra yed +baske tball +kim ono +blue wave +ali as +ë§ Ī +mug shot +ce c +do gre +ad ora +ðŁĵ· @ +kra kow +intrigu ed +exhau sting +astron omer +ven ison +lady bug +ci v +bra e +us m +bri be +acup uncture +pembro ke +ke ating +chi e +y ad +t si +sm i +see ding +gate shead +lis boa +gy p +canv ass +ðŁĶ´ âļªï¸ı +op i +ni r +soci etal +ly te +ati es +c sm +ar tery +al in +aka poor +abstr acts +âĢ¦ âĢ¦ +teen wolf +ne we +travel gram +sentim ental +per ched +han del +ho ek +f ay +coordin ating +anim ate +man ian +effor t +jer ky +f ck +adri enne +ma bly +tra ding +my el +spi ro +sol a +stor ing +over drive +monday morning +dream team +pul se +bon di +ber nie +pgat our +tri poli +son am +plat t +âļ ¡ +ag roup +îIJ Ĵ +inv ading +v cu +k ell +ñ os +un dead +pod casting +mercede sam +mana fort +cor tex +que so +impecc able +pal mer +wil doz +sport sc +guacam ole +dispen ser +cate gori +stun ts +per il +invit ations +dune din +xi e +achi eves +saf er +pre ds +ph an +knuck les +k ak +igno res +lovemy job +aru ba +ound ation +datac enter +co vert +gr ing +cou ple +ا ر +vol i +mc cle +arti sans +lu do +kal am +arom a +under taker +hu la +wiz kid +gu mb +god frey +bakers field +ker n +engine er +car ve +pal in +guaran tees +pe bbles +b ays +zi eg +fin k +â¬ĩï¸ı â¬ĩï¸ı +down pours +ro chelle +rasp berry +ðŁĺ ® +gra phies +stom p +caf es +ari zed +utt ar +cal vary +dri e +crusad er +bus an +tux edo +si u +seam us +cul tured +blan chard +town house +ge red +butter milk +flu ctu +roger federer +hel i +ðŁ¦ ĥ +u ous +ram esh +mu ppets +email marketing +ye ss +br ice +ri zio +pel o +donnein arte +u rable +inve stin +bump ing +raji v +sav a +thro wer +fore x +o hhhh +th rust +pull man +r fid +sep sis +le ed +fri ght +roun ding +ne b +ph ins +ai sha +utili zing +squ ats +gold smith +j ic +bo ks +vau s +i po +exclu sion +tari ff +po kes +min al +land s +en force +washington dc +or char +g x +mar ys +ey our +aussi e +bak ers +un popular +latin os +lar ge +pu tnam +bol o +wa de +pel o +di zz +ob struction +fla ppy +weare the +depend ence +pajam a +e te +y ann +e wan +disc la +a ay +kar ina +e ic +an trim +w soc +neg atively +kai do +fotogra fia +dh ru +colo ssal +mcle od +k wang +mani pu +ex hilar +us atoday +summer slam +co les +tapro om +unbeat able +de ma +tic ks +k ling +fil s +campaig ners +ภķ +brew ster +audu bon +qu ay +ch s +ki gali +d ler +strength ens +som al +sign ingday +gol ds +pig ment +orche stral +g q +lin kin +ðŁı ĩ +ta w +algar ve +ho v +ear le +gold fish +am ig +ex er +ben in +dru id +ðŁIJ ¸ +she m +quat tro +mer cen +men te +incorpor ating +bon anza +state fair +en de +concep tions +e es +âĻ¥ï¸ı âĻ¥ï¸ı +d son +fire arm +orb ital +we h +multi p +fo b +requi em +p light +thou se +sa id +oc re +remem brance +n old +chi pping +be v +er t +ca thy +sy m +ri ggs +m ley +dialo gues +sl ender +how l +gau teng +wd w +to bi +smo kes +im plo +b pm +ad n +mom basa +cap sul +bloom field +artic ul +cle o +goog led +flu ffy +l ard +en zyme +ve sti +ibra hi +fl ame +e mea +out ages +dispro por +ble ak +an sel +ick er +st louis +stock market +good friday +sau lt +stal led +pro m +ep som +b é +the se +sau ces +me w +lit fest +pre d +re u +kar ak +si enna +ell in +bio technology +ï¸ıâĥ£ - +tac tic +sa in +por k +mon za +ka j +lu sh +compart ment +chang ing +shraddha kapoor +fo al +ar tem +cu ando +can ola +ori ente +me sse +d ited +br c +box er +bbc two +s st +ment day +em ing +de wey +kof i +âŀĸâŀĸ âŀĸâŀĸ +reali zation +smo l +tw ood +san je +flag staff +ber wick +cor set +can 
ary +whistle blower +et ched +com posing +squee zed +bow er +auto desk +ne h +mathi eu +ba ja +Å Ĥ +hy dra +da im +am eri +insi sted +mer lot +gar ros +heart news +gaine sville +cut ler +bo de +ðŁĺī ðŁĺī +lew es +scoun try +g sa +us u +cc m +god awgs +phara oh +cra e +mor ley +hyp noti +f ades +neur ons +fu zz +ing co +high landers +star k +vig ne +pac kets +amar illo +reu ben +insul ts +bas ic +vec tor +n me +ac ruz +tro s +transm itter +ðŁĺ ŀ +interpre t +ðŁĺ ² +pre quel +mc gowan +dis semin +ðŁĴĺ ðŁĴĺ +mascul inity +indie gamedev +ali ve +te t +pe tal +ema iled +ar med +ko o +he er +ba ird +super junior +metro polis +delav in +decl ines +stit utes +Û ģ +p tbo +g lan +cho res +e aling +chri ssy +ste mc +vi an +assassin ated +pron ounce +illeg als +discover y +cav ill +fri fotos +f al +so i +sabot age +t int +p dc +ðŁİīðŁİ Ī +ãĤ Ĭãģ +ji o +endeav or +in sig +commit tees +she arer +me tz +mar rying +h dd +g by +fre t +tri sh +pu l +scrip ted +sa ki +l w +ke ye +shim i +nan aimo +ca h +à « +tem pered +ici an +du gg +dish washer +air field +s rugby +gr inch +y st +r ms +mahat ma +lan kan +disc ar +dige stion +no des +l ls +om ic +gu tter +tis garh +feder ico +election day +bo he +master card +fire ball +âľ Ķï¸ı +oy ster +p ong +do k +en route +m vc +beat the +ali stair +shu b +sh aming +cherno byl +ghi bli +the s +pin ion +d bs +sal ts +ic tion +epi ph +nc pol +in convenience +whit ley +inspec ting +wood ley +wi ener +skil let +no les +m ca +h ina +a sha +willing ness +well ness +tam ed +show time +dis advantaged +ber nat +us n +mission aries +coun selling +arrog ant +quant itative +leg alization +ho dge +energye fficiency +cameron dallas +pos sessions +p bb +harris burg +v g +hindu ism +happy thanksgiving +fi b +re acting +tweeta picture +pol iti +mu ppet +hur rah +pac e +coast guard +guar ded +as am +par ry +fore very +x q +oom f +ke anu +j ind +ri st +customer service +sac red +ðŁĺ º +ton er +occur rence +mat u +val dez +red d +is ak +power rangers +pe asant +raj ini +abra ham +e mil +car do +tr il +hair styles +obsole te +sam pler +direc tive +delavin kisses +ver ton +glo s +sp ay +paler mo +com ets +man ziel +chicag of +ski pped +pic torial +h ant +b mi +a ol +re opens +pad dling +devo s +fra ud +bas eline +que ues +sp ired +sn are +eu ve +descri ptions +daisi es +ca ching +gall eria +tri mmed +stin o +recy cla +ic ular +bir ken +raw lings +fli x +chic as +b gt +lik eli +argy ll +thel ove +ga ston +bl anca +ha k +f one +sailor moon +h aci +ima c +fl yn +de can +bel les +ap ic +zo g +taun ton +con stance +lasag na +ker nel +in ka +har bor +collec tively +calcul ated +av ille +shil pa +pur du +gi mm +fun er +a est +pembroke shire +nighting ale +n unes +hyper tension +hu bert +sli ders +infer tility +comm ended +transat lantic +metr ical +!! 
@ +Å Ł +ss g +bac ca +inver ted +fun factfriday +it ans +albu m +acqu ainted +ri er +whel an +sar ab +mu e +snoo ze +pi ff +agre eing +sp itting +jer maine +n ye +âľı ï¸ı +am bush +ze ph +con greg +univers ity +s app +wann abe +pat rice +ib d +do glo +fri dges +sun d +king ston +ar gon +kam en +hardro ck +ds ley +do lores +ì ° +ota ku +pi ping +be having +âŃIJï¸ıâŃIJï¸ı âŃIJï¸ı +blue bird +an sari +teapo t +fire work +cro p +log ans +ty ped +thick ness +ig ers +c fp +dys functional +contra sting +et ty +aston martin +tx st +dra grace +at tributes +marath on +manu scripts +john stone +ðŁĺ± ðŁĺ± +bo er +ay u +aru gula +poo rest +con du +assu mption +anag h +no h +delav in +sit ter +g ö +mor ow +kick start +com i +gl acial +ghe ad +ba in +ker shaw +en dof +fre ud +om at +i af +hu g +sign up +each other +defin ite +tu bing +shak ira +ðŁijı ðŁı½ +uu uu +sw in +sham bles +ol as +sk ell +brit ain +kn w +clu tter +om y +j ens +hang ed +city scape +scra ps +un locking +dead liest +er no +breast cancer +a it +inspec t +fu ri +ðŁĴ Į +ku d +ju le +or ah +mi ds +m dt +bur gring +r attle +pu sa +stal k +cle ans +iss ance +z ek +worth it +nam eis +musko ka +council man +urban art +bar rac +un solved +tu l +g ita +white board +soy beans +em ent +cont i +saturday motivation +conveni ently +doc king +t ado +âı © +sp ino +puppy love +po f +fabric ated +robb ers +adop ts +ti fied +kk r +indulg ence +notic eable +macqu arie +chap el +sensu al +ki ko +melan oma +lore tta +li ance +ab en +sp lus +ga al +ac ele +lib dems +compar isons +ðŁĮ µ +rhy thms +mer y +en capsul +nap ier +ðŁijĮ ðŁijĮðŁijĮ +ðŁij IJ +plat z +fre sno +re formed +ran bir +el it +the best +bhu shan +vin nie +impro vised +s ittin +re created +e ba +ec ker +ac rob +pon te +cor d +gi ddy +eur usd +fe ver +intu ition +gar i +dum mies +bud weiser +amend ments +te tra +sch nit +ay as +mar ys +ci st +k ani +ker mit +ðŁĺ±ðŁĺ± ðŁĺ± +tin ker +strol ling +di visional +niger i +omin ous +menstru al +kar ab +k hy +bw fc +pan handle +l illi +well er +stra pped +son the +transfer ring +ethe real +sne aks +ru dol +gab les +jac king +cin code +for tune +canadi ens +con for +ab normal +frank lin +tit a +mu la +persi st +cu ties +ki el +ðŁĩ± ðŁĩ +her mann +aw k +fi asco +ko to +we ta +hi ker +budd y +preven tive +mcgra w +game boy +forsy th +top shop +si ob +sad h +in tram +follow art +so aps +dragon ball +ou x +morri son +๠ĥ +lu bric +adul thood +morri sons +âļ łï¸ı +her mo +ta ka +stall one +mis use +team gb +ra gha +con fined +at y +hom ophobic +nw o +sky news +ho ya +ac rosse +wi iu +pur ée +jed dah +ðŁ¤ § +advis ers +ph ine +an is +scrump tious +ë° ķ +c ke +vin y +ter m +s dc +o do +home school +vas c +leop ards +debor ah +illic it +cur ran +as roma +nau ght +mar ig +brand i +em p +ðŁĺį ðŁijĮ +î Į +su spend +lu z +initi ation +sch aft +jensen ackles +craw ler +post doc +des ks +trail blazer +den omin +tri x +no ise +po et +± ï¸ı +s mug +vol atile +proof s +pharmac ist +sardin ia +mash able +kim chi +co ed +schal ke +doo dled +c sw +sh ur +ro x +do k +chris brown +mathemat ician +ab ound +ang elic +rock ford +d ole +yor kers +ms n +g man +xavi er +bor rowing +mark ings +longh orn +k ja +diver ted +mm it +euph oria +ay yy +te a +pa h +ck i +un cut +li ven +ky ung +fan art +mer ing +red ding +amo vie +gri di +c thulhu +schol arly +ju dah +th bewithyou +eu calyp +ðŁIJ ķ +hert fordshire +cour troom +by u +auc tioned +ple ase +mar cia +ê° ĵ +succe eded +el as +arvin d +t lot +saig on +re tt +ra kesh +fd ny +as en +se bring +gladi ators +you know +v lad +gol a +par 
ap +ÑĢ и +sab cnews +one team +oh l +sun e +ri j +cd c +star gate +run down +plat o +ph c +chat ter +ra viol +mn f +mand ala +li et +ภķ +mari a +hun gover +consoli dation +fer rell +tradition al +ilove art +gal ap +ðŁı Į +que zon +espa ña +ðŁĩ¨ðŁĩ Ń +ho bby +steam boat +mali gn +guil lau +pro hi +its me +íĥ Ģ +in scription +al z +mari an +k ade +mm on +adju sting +ne sts +intern ally +ci r +vik ram +mal ala +k ph +fel icia +the real +cap tivity +at is +marcor ubio +kale ido +che v +mano j +le more +gent ri +vi ps +tro pe +" âĢĶ +pair ings +mal nutrition +fr ay +desig nation +brun omars +az e +tor rential +pan zer +ga il +under the +the ological +schizoph re +dazz le +freder ic +mo par +ad illa +so ggy +ra un +medi ocre +colo rec +i fe +p inst +blu ef + ² +world water +gir oud +clar inet +ad olf +tar antino +receip ts +assu mp +ðŁij Ł +coffe es +âľĬ ðŁı¾ +du plex +s of +r x +lin o +timber wolves +pan dit +mo tm +e ga +ay ama +ach s +outsi der +ll en +co er +til ly +cheese burger +ma ds +ple dis +emp ty +national parks +az iz +p mi +jun kies +f ener +sq n +è s +gener ation +cleop atra +bhuban es +mosqu es +ty free +popp ins +tw c +or well +n age +ka whi +hol low +dal ai +¨¨ ¨¨ +ou ro +m health +gi on +az o +vis as +reneg ade +re ic +w sop +ðŁĴļ ðŁĴĽ +e chel +tox icity +mü n +bun k +stimul ating +asth our +\ ' +ep h +ende mic +cn bc +shrin king +peabo dy +michel angelo +can yon +wal e +su mi +si ders +inu it +? . +profession alism +dr acing +plat oon +p ons +out bound +maple leafs +de sol +cen cy +a than +ver ma +ru bbing +ok an +ðŁij ł +mull ins +authent ic +Å į +alman ac +ga ia +bb q +on imo +ke h +ty a +tou ts +y av +re posit +, . +wi ght +se eyou +cal lof +done sia +bar gaining +gr anth +sd su +amphi theater +p su +re watching +wine tasting +peak district +dete cting +thur man +phe e +èª ķ +u mich +re r +sculp ted +go le +name sake +ðŁĶ ģ +serv icing +bau gh +pu gh +pen cil +dar th +munch kin +at orium +ten ers +sun y +rolling stones +mag ing +star rer +i dris +fe instein +ag ron +âĺºï¸ı âĺºï¸ı +supervis ed +chamele on +aggre gate +succe ssive +mo gul +inst yle +pol dark +custom e +ohio state +ha ya +ci des +broker age +angel ou +fifa wwc +de forestation +al ton +pam ph +hu gged +ho bo +change able +ku ber +bur roughs +demon etisation +cape cod +vers atility +or ice +le ila +womenin science +tu a +he dges +embarrass ment +ali fe +so ars +ni ghter +hy mn +gi pp +chas u +tech s +ni all +k illa +hi ka +cam els +valu e + ¢ +sc oops +mah moud +clu sive +adri ana +pac o +oz il +un as +transl ations +whispe rer +s bi +bu xton +bio tics +indi ffe +ken ney +k lar +et ching +barra best +inst ability +se ine +vo tel +blo gged +whis key +my space +t ant +lan dia +give back +illu s +aw ak +ac ab +f bloggers +cloud computing +blat ant +syri ans +band ra +sty n +an em +ke ted +kar thik +barun sob +pin ot +gu bernat +gay e +arti ste +i fied +conven tions +hu an +geni uses +eeee ee +fol ly +somer ville +pride month +ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸ +chemo therapy +paul s +bak ar +ìĦ¸ë¸ IJ +taiwan ese +fol lo +c ss +re ign +nn nn +fla un +catastro phe +iti es +frag ments +extre mists +ym oun +car men +eze kiel +conne cting +se h +man ta +remodel ing +we ymouth +at oms +ce m +ne well +lu mi +the open +mo c +mili band +g land +z shq +mag gie +mani acs +m sp +ad y +cre ams +le anne +e sta +py g +af finity +pray er +dun bar +ligh troom +ac adi +wyn onna +roman tic +state dept +sick le +wh os +lam o +et our +fin ity +shru b +shar pen +pun dit +ed on +af ore +mar s +jeff ery +ter ps +medal list +kath arine +accu sing +ta z 
+roy d +from home +confron tation +alle gh +ðŁijī ðŁijī +refresh er +ran veer +never land +jo jo +lu crative +en am +ca ver +pa edi +man jaro +flu ids +the ssal +oppre ssed +mu ss +joh anna +Ø ® +cn g +buil dthe +sett les +s ith +fu ego +cl amp +ar ag +pay er +ted x +mand y +inter stellar +fr c +ch and +b cc +mo lo +len til +johan sson +grims by +nature lovers +ðŁļ¨ ðŁļ¨ðŁļ¨ +shin de +x in +international dayof +transiti onal +sat a +cad dy +wo d +if u +ha ys +holl yo +j ang +ir c +co im +grad able +" " +ðŁį ´ +ঠ¾ +a el +n yo +west lake +time out +sof i +phenom ena +cultiv ation +ag no +un armed +so t +con j +gen o +royal navy +nutriti on +fair mont +ti relessly +sn g +re ty +mic a +lu cent +slo ane +droo l +riz al +od ell +critici zed +. '" +la ze +deser ted +co der +pra s +l illian +itiner ary +dav y +an ap +whi pping +hobo ken +kare ena +çľ Ł +vi us +ter n +nan tucket +mis understood +bu laga +st ant +chin ook +z am +reli es +d ss +ed mond +sket chy +m ell +fe x +rec tor +dist ill +day dream +wine maker +ri pley +billion aires +hel ene +ati f +cul prit +bertr and +wou ldnt +ma pped +v ak +gla dly +parliam ent +kidlit art +ware ness +goli ath +âĨ ĵ +view point +tat ted +fu ls +dor sey +ang lers +li ds +ki ya +bow les +be h +b ite +compati bility +ance stral +pro x +beha ved +gubernat orial +ch field +sab an +z h +teen y +shibu ya +holli day +pan cy +âĿĦï¸ı âĿĦï¸ı +seun gri +? , +ðŁĩ¦ ðŁĩ· +im itation +impac tful +any i +gene vie +añ os +bate man +gli der +af ar +ra sheed +effor tless +sh war +dach sh +er un +at os +kin i +ch d +kha ki +k lin +felici dades +bel o +as l +to ppers +fin ley +stac ey +rigor ous +kar ting +le ppard +car michael +be ret +c se +ak hi +mer ingue +ab an +ha ke +ger i +er jee +re sto +comm anders +pr it +fl or +ad ven +ex termin +remain der +å IJ +es g +martin o +lulla by +| @ +mi gn +in store +big bang +cor di +cau ley +ante bellum +dg ate +cro ck +span dex +scaf folding +ore os +ê°ĵ ìĦ¸ë¸IJ +pom ona +ma uro +uni versi +re mi +af ootball +t ant +sm alls +ne h +worl do +tropic al +mor ph +jav elin +gla r +arqu itec +reminis cent +tu bs +spide y +make u +syl la +progressi ves +blo t +shor ten +keep in +ch ak +ang st +super food +decad ent +ston y +neuro logical +ar boretum +ann ak +fe ma +per cu +dis respectful +small biz +lo x +co om +c sc +bs bi +pre valence +him ss +esp an +mo ga +fr ampton +sky map +mas se +levi athan +( ). +noctur nal +car ameli +ang or +amne sia +outsi ders +she alth +rhin o +ant ag +ag io +ðŁĴ° ðŁĴ° +take me +kab addi +c si +m sh +coch rane +thessal oni +sil a +ha us +du sting +obe se +mack lemore +mani sh +len in +m dc +gro wn +shef field +s rs +ke le +car son +ch um +dah lia +can tore +opp o +how ling +cyber crime +sur realism +sc ran +fa iz +thre n +rac ists +r out +pk not +se mana +sin i +mc cull +ma chi +alfon so +y b +sar dar +kend rick +den g +reci pro +on f +doom sday +bri bery +custom iz +art is +c pi +ðŁĻĪ ðŁĻĪ +sla va +let te +en s +âĿ¤ï¸ı ðŁĺĺ +cra yon +ad an +tr c +migr ate +simp son +row ers +king sley +farmers market +shee han +ne phe +bor non +car ton +mic key +all ure +u lu +sli pknot +heb do +gui do +dog celebration +online marketing +acceler ating +) .. 
+origin ated +macar oni +ed tech +out field +mit z +disc us +adverti ser +man or +ha shi +descri p +cap ita +ful bright +recep tor +con n +con ey +spion age +r attle +pre st +u li +blog post +acker ay +) âĢ¦ +red velvet +mat th +inspir ing +b sd +ker ri +po con +mil lar +re pur +accent ure +ä ¹ +ram bo +ragnar ok +dele ting +british museum +pat ory +leip zig +flori an +sci fi +in ers +br ate +yo y +melis sa +ab er +ma sa +po te +mosquit oes +transpl ant +r pa +; )) +bast ille +yl an +joye ux +melo dic +cap tions +atri st +roch dale +gott i +pew die +cuties aturday +who is +aqu aculture +tiv a +sp el +he ss +ha ji +fred die +co per +brand o +v k +photo book +* , +my dayin +micha ela +brune i +sr ini +in te +Ä ± +de ol +d fc +separ ately +bun d +ve sts +to c +me ck +rein forced +constra ints +car roll +sq ft +re ver +cam per +bird man +in action +gener ators +triumph ant +pe sts +o vo +gy pt +al amo +sc aled +suresh pp +sd n +is mo +gi os +) @ +justic eleague +restaur ant +gab i +den gue +next gen +exemp li +ap ex +inspir ational +down side +kid z +u pl +et na +alvar o +fel dman +bar net +m ha +es ch +bloo ded +>>>> >>>> +kan i +ho fficial +casablanc a +bir ds +ty ga +sw amp +o day +new castle +nb ap +ci sion +cho ols +af lo +ne p +mon ton +ak b +super model +down time +th os +sc wx +snoo py +ag greg +yo ke +nor cal +we tt +prolon ged +me tast +beat er +f ta +t lap +disgu sted +y h +voice over +itch y +ip c +ðŁİ ¾ +phe asant +stra its +ram pant +j g +fer til +assu res +fortun es +sal inas +liz ards +kett le +i bs +cyn thi +he g +mc cr +soccer oos +happen ings +cor den +ðŁĺĤ ðŁijĮ +t ches +egre t +wolver ines +congratul ated +ho gg +bott ling +wr i +fer ri +bo sch +af ire +og den +s jo +j dm +sv t +con tex +tol lywood +min k +me se +super sonic +op oulos +å ¸ +âĶ ģ +knuck le +gu ise +gam i +chu cky +z inger +radi al +compla ined +bo da +fe tal +discipl ines +cor ro +ðŁĩ®ðŁĩ ¹ +op ted +filtr ation +ad nan +em cee +mi stre +insom ni +fer gus +tra jec +on don +med tech +tanger ine +madra s +gru e +cab s +z hu +sureshpp rabhu +insul ated +day swild +pp m +band ai +v day +s ff +squ id +lo thing +not dead +expre ssive +cu ll +ala stair +x u +up front +fish ers +en es +um d +dis missal +sti er +sel s +lu st +re active +prote ster +eyel ashes +al im +goo de +gre eng +da ir +com pen +anush ka +proto typing +ma pu +bear ings +ðŁIJ Ł +for me +bsbi botany +timo thy +out skirts +am bed +are tha +wend ell +stre aks +ni m +k pk +sne e +fit ter +quo ta +p ate +win ning +ðŁį Ń +sho pping +ma inst +cul ver +ste vie +mcfad den +counter parts +gren fell +fol som +dor set +tech crunch +⬠ħï¸ı +tip tuesday +us l +tre x +geor gie +ranveer official +lic ks +se wn +k f +' âĢ¦ +jap s +p ate +orth op +fe sta +stra s +mon tal +hammer smith +fore most +wido ws +mad re +ite z +mito chondri +lig ans +z ona +cari bou +m ss +andre i +weather channel +gh c +: ... 
+ta ft +awe ather +al isation +bru tal +bliss ful +nik ola +mal icious +q m +mpg vip +bro die +bl itz +applau d +dri bb +v ague +dog go +transl ating +interpre ted +hat ched +ge tyour +benefici aries +spar ring +caes ars +aw illiams +la hat +bro ke +ti mp +virtu es +rel ying +pie tro +k tn +ici sts +pab lo +lou i +a ag +pn pp +cha st +pul ses +fini sh +usair force +type writer +thomp son +dog s +ut to +ãģ į +sand al +new ly +do ge +z w +wan kers +ne gr +mu cha +determin es +black fish +sk unk +mu ps +instru ment +phy to +daysto go +skin ned +hai der +con ten +ðŁIJ¾ ðŁIJ¾ +we iler +undoub tedly +chair ing +wall is +sh ard +zind abad +adul t +absor ption +pre sto +deplo ying +drum mond +battle front +seag ulls +how dy +juda ism +des de +part ition +âľ Ŀ +no logy +national bestfriend +lesn ar +film fare +co asts +christen sen +ac an +mb u +co pped +ru bble +sw c +fun nier +far ther +where as +nano technology +with stand +pil low +bow ers +to pe +it ly +con fit +ma kar +comfor ts +bo sh +cli pper +bal la +sti k +mil b +safe guard +musi que +eas port +ya z +pad ded +bad er +fore ign +chop in +archi ve +o ka +tran sporting +tml talk +aj it +consequ ence +sc roo +ff o +collabor ated +pug chat +ye mi +jav ed +au burn +o of +ma w +sau cer +miti gate +i les +evangeli st +ter ie +re cl +indic tment +cat a +bright ness +may the +whim sical +un lv +key word +cu min +med way +west world +tra w +im posing +form ity +coul ter +ab z +ny pd +grass i +kel sey +qld pol +clock work +f dr +di anne +âĺ ij +ad h +p ann +bra vely +ae ge +un lawful +ver di +pocaly pse +phar o +kar la +reson ance +ma stiff +la dak +bu u +ma iled +hi i +craw ley +tor rent +mach ado +liby an +effort lessly +fal sely +q vist +ke ef +craf thour +cheri shed +val kyrie +s ari +kal amaz +be he +ðŁĮ Ļ +th im +ro ddy +col trane +but chers +ach im +wk end +awk ward +cab rera +:) ))) +fran c +decl an +con dos +a ja +pandor amusic +char ter +ph ill +mon trose +hatch back +handic app +gre aves +eucalyp tus +ut most +t son +bur ton +mid wives +in cur +ðŁĺį # +moo d +compre ssed +tom a +must ang +mo g +as ana +te stic +sho tel +in sol +cor sair +nh q +ben ny +sm ma +kap ur +in con +jon as +ener gies +don al +as ad +se z +n pa +archi ved +stimul ate +do p +hy d +gri eving +ãĥ Ī +ron a +why te +tree house +ss ell +sand ro +ko bo +ther most +se clu +hi ya +ge ez +mam as +prisc illa +flav oured +fas s +w old +maker space +cospla y +p tv +happy valentinesday +sequo ia +love craft +gu an +d tm +ci i +yoko hama +pos thum +re q +ðŁĶµ âļªï¸ı +galat asar +dol by +hamp tons +disturb ance +stone henge +ok c +disrup ting +month sary +jun gle +head lights +du stin +micro sof +happy mothersday +ko ko +gra zi +te sto +na idu +mal ay +ari al +ru mb +ab oo +har man +tra pe +spo ils +je ho +go dly +lock screen +z un +pi ous +ma gento +l enders +prob able +corpor al +m our +aw al +su a +call me +ton ne +go vin +devast ation +x j +gear box +war lock +per me +it ate +gaza underattack +du val +paras ite +clement e +le th +i va +fro zen +tho les +to bin +cair n +s ill +luc kiest +conver ts +st ale +pan cra +euro pale +wis dom +sch ur +ì ¶ +verti go +bi j +u bc +nu re +righte ousness +mt c +factor y +ver st +revers ed +hur i +hee chul +fab er +ar r +ul ous +ven om +ph at +green ery +bra dy +à ¦ +: (( +never giveup +di sha +mo ta +health care +dun ham +dex po +den zel +bb ins +f ics +wh am +mc g +eli an +wat a +str alia +tel lu +pe sky +spin off +ar moured +re acted +do fficial +te du +sag ar +mor ally +paralle led +fi os +dow ner +dau gh +re do +world cup +tari q +bar ne 
+glaci ers +oc cult +barbar ian +her mosa +!! !) +y ur +inter nation +p ss +sit u +p int +american air +sw am +dopp ler +ðŁĴĻ ðŁĴľ +cincode mayo +le van +hell enic +mc ne +ju di +yu h +st x +qu are +ðŁĺĤ . +sti g +g els +mot ley +hard work +euro zone +e ad +ç¥ Ń +seab ir +ci us +la id +alpac a +presu mably +pewdie pie +boo ted +am ari +tam ine +sol ace +bar row +acade mies +x ian +om ination +dun geons +b ma +de ity +ai k +stab il +hir a +affection ate +ving ne +new port +ãħĭ ãħĭ +thir ds +re tains +aroma therapy +ski er +ni ma +do pe +cr inge +con domin +to or +anim ator +sar aj +seas cape +minim alism +lake shore +calla way +berg man +à¤ Ĺ +whisp ering +stupi d +ri ghtful +requ is +ir n +se va +ut pol +tuber culo +squ ish +de but +govern mental +christ ine +all man +weap on +s ito +bur i +lo lita +leaf y +fu ch +tin ted +mck en +a hahaha +ðŁĩµðŁĩ ¹ +repe al +ne gan +ðŁķ Ĭ +tail gating +game insight +ðŁıŁ ï¸ı +yaku za +z t +ti ring +pro posing +bow lers +tra itors +ak shi +cler gy +cit o +up sets +tu scal +symph onic +sil ently +shu ff +black well +ðŁĺĤ ) +ko be +rober to +ri dg +dc u +mer ino +ft p +east side +. ~ +nb l +mn leg +ts for +frau dul +ca pping +in my +gymna st +ston es +ss in +twe aks +shag gy +oak land +dem sin +sang ria +mm va +hen nessy +down ton +ri ghtly +in it +aga ve +ob last +northe ast +friend ship +dal a +tro phy +ðŁij ½ +mag in +margar itas +ê · +ww fc +fa sh +di ke +cu d +char t +ðŁij ® +refuge es +jop lin +n cs +imp y +firm ware +pas cu +flam in +health tech +bell letstalk +w aka +ol ls +la go +co wan +bombar dier +sh ome +ðŁĻ ħ +mc master +na ve +well s +u ta +tell ers +mis fits +kap il +face off +af firm +a pro +whit epaper +super yacht +speci mens +al located +... , +- __ +ka w +dachsh und +djo ker +s work +qui ere +or um +ðŁIJ ł +som m +c mt +ingh our +skin ny +lgb ti +gi ggles +break away +resear ched +par ity +my al +ms l +re tained +si vity +make inindia +sol ves +defam ation +wal tham +sri racha +road way +concep tu +al in +iw ant +å Ī +del ft +tender loin +ga ins +faul ts +sw ire +st ellen +pol lo +dy ne +bornon thisday +asdf ghj +sq l +sali m +advis es +vo ip +ìĹij ìĨ +un touched +she il +ontari o +uph ill +so bre +de shi +nov ella +du tton +craw fish +ا٠Ĩ +ma a +tw ine +kal in +ðŁĩµðŁĩ Ń +ye ss +brook s +hoo siers +ton ka +umbrel las +ay ers +ate am +acqu iring +su ction +ä n +wi es +tari ans +soci o +mat tb +shepher ds +o so +charity tuesday +s logans +ninj as +al bat +by te +bash ir +trampol ine +mydayin la +i ja +bas el +ror y +gol die +fi rec +un noticed +pecu liar +sch a +ker son +mour ns +liquid ity +qu ipment +hi bs +ar s +aeron au +slide show +sla bs +delici ousness +sk itchen +hta fc +full erton +cre ighton +aer ob +procrastin ation +az ores +white hall +uss occer +medi ation +djoker nole +and me +um en +noxi ous +jo ss +ili fe +anni vers +sudan ese +et res +under mine +whole foods +diso be +kor i +ade le +eli z +can ti +al on +gymna sium +sarko die +meteoro logist +yl de +ste en +stamp collecting +nas al +lo tt +fran ks +ex ol +ack i +good year +animal rights +y les +vio lets +mm es +s thel +ra pping +tu scan +wai ver +tur ner +eat local +northe asthour +anim ations +tom morow +t sh +ff ame +bra e +pe tron +glam our +br yn +d cs +bal es +ðŁĶ ¶ +bro v +bre v +b ons +physi que +car ne +x e +elix ir +vol ved +l oma +ìľ ł +æ ĺ +van u +ri gs +bal ance +va res +bon ita +sprink le +perfec to +di on +le ak +calcu tta +o ba +d ma +c mon +tun er +pneu monia +bo gus +apolo ge +cl ough +bor ne +)) )) +revi ved +o varian +ner f +c legg +fan fest +cho u 
+reali zes +mc n +li gu +leg alize +just saying +for ster +bo sni +k hi +in dom +hei del +en cryp +si ss +ed di +mar bles +brisban e +y ing +pre paid +wal sall +cooper ate +orche str +mar isa +ho wie +che wy +bren ner +andro meda +e gan +sto cki +cav endish +ag an +ban o +de ir +go g +bl k +re thinking +ch ig +rhe u +sni p +p eng +semin ole +m swx +an nex +lyn da +lewisham ilton +cu mul +tb l +dolph in +agu ero +........ .... +pre lude +at our +gr anger +too ting +ro tun +dis ar +home items +da res +**** **** +ðŁij Ĩ +compre h +jin x +as well +iri e +circul ating +ðŁIJ ¥ +over board +cultiv ate +rhe tt +oriente ering +ca k +bal kans +s itt +jas min +britney spears +ro tor +se aling +g bc +oc ci +f as +eman cip +com er +war time +tic kle +son ny +pac es +log g +at rix +sr p +g win +do bbs +uz be +the wanted +dru sh +ex tru +m icky +honore es +dar win +re dux +mm j +ram i +jalape ño +io c +do ver +ju ju +whit ney +s eng +en ly +au ch +archipel ago +vigil ant +man gal +wil dest +parano id +hal i +bb ly +sanc tioned +real ms +con co +u ddin +c sk +play time +libr a +sav ag +oc tane +rec tan +re turn +par rish +mor rha +cc p +c mu +sa iled +se vent +ro sie +pil ing +he w +boar ded +seg ments +neph ro +( . +cr ats +bak es +ðŁį ¸ +back tothe +sibl ing +kirk land +ke o +gu wa +bre ads +ðŁĺľ ðŁĺľ +t q +haras sed +ga u +wil bur +j isoo +ep er +li sam +tri ppin +sh ino +ru kh +beast mode +cho a +inst aweather +rich land +gar i +fe z +cowboy snation +fur suit +k run +a en +sycam ore +se gun +ent ennial +di h +o ax +demsin philly +ðŁĻ Ģ +sn hl +pen nies +pass words +ma kin +ty e +d eng +kni gh +jeep life +hel pline +a for +zz zz +ste amy +pic ker +iter ate +happen ingnow +ki b +bloom berg +martyr dom +bul ly +assor tment +a hora +zo e +no i +illu stri +agar wal +p sc +electr onica +recruit er +gar diner +rad ha +naf ta +dot net +pi ero +geor g +bel s +ðŁĺĤ ðŁĺį +tuberculo sis +run nin +mor is +haul ing +ev oc +bre thren +sha ir +frame works +a stu +ri gid +ku ma +kre me +jin nah +insu rers +ny u +f ere +nol lywood +good vibes +- ... +toi le +sk ril +instaweather pro +cze ch +pa vel +one piece +nike plus +fi let +cav ity +ðŁı½ âĢįâĻĤï¸ı +ðŁİ £ +dra stic +dail ys +siam ese +re bu +oste o +lar k +f re +sh elling +p é +glad ys +ðŁıĢ ðŁıĢ +gusta ve +submer ged +grand stand +att u +won t +f pv +b ley +jon i +ang ames +weigh ted +al ou +ठ¶ +les bians +f j +anni es +am l +dor ia +dav in +be ta +can c +madewith unity +ha j +bad lands +mu l +blu ec +pa wn +cov ington +neuro logy +htt weets +dysle xia +thel ove +ne at +fork lift +autom ate +une ven +monte ss +he in +ha g +rel ics +competiti veness +can elo +mar tens +bullet proof +sk ittles +g ya +pri mo +americ afirst +woo o +abor tions +?? !! 
+ma che +ld ers +rl ly +preli ms +direc t +cour se +swa in +super cell +ec centric +sting ray +ple ts +wil cox +west in +okan agan +kir an +car bo +bomb ings +ra rest +bo h +gaw d +di gg +mo ana +enti rety +en closed +dodge ball +par ton +milky way +at r +thorough bred +re ally +qant as +epiph any +ine e +aero smith +spi eth +ar thro +ell ini +du bu +bra ving +âļ½ âļ½ +re structuring +illumin ate +equ ili +mp i +ash ton +pony tail +ma scots +flat tering +cru m +ast a +à® ° +stranger things +bar nab +ر ÙĬ +make shift +got cha +will am +cho irs +kilom etres +gho sh +eu than +dol ly +un ning +the ar +cre we +w sw +j ace +dis miss +ke an +ho ta +kh at +~ > +thir u +ren dez +hart man +tee ssi +cas ca +z ah +hydr ange +fo d +aw p +mzan si +thick er +nago ya +ne va +sti que +cast el +dam ian +there by +ji ang +ale k +music islife +ra q +calla han +gou ache +somal iland +sean hannity +ra heem +lo se +elo ve +whar ton +rectan gular +illustr ating +har ne +auti sma +scra pped +ell and +decre e +nag pur +ki pp +so re +n md +ma as +gun a +gart ner +bel li +then ight +je on +gendere quality +gi ver +a el +gar ments +ne u +mardi gras +mar sden +ro wer +pollu ted +camer aman +vin od +be asley +cro c +ji u +hollyo aks +anesthe sia +al les +ste ward +lati mes +ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸ +tic ian +gor ia +come dic +ðŁ¤Ķ ðŁ¤ĶðŁ¤Ķ +nai ve +sli ons +ł Ī +bur glar +ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃðŁĺŃ +york shi +se ñ +fan boy +lau rel +inci dence +potom ac +rober ta +presi den +pr yor +os bourne +w ku +te me +pal ae +ðŁ¥ º +re boun +itu de +red dish +k hand +coloni alism +north carolina +ðĿ Ĵ +manne quin +lady bird +ta sty +knowledge able +g shore +ðŁĮ Į +à® © +qu aker +salz burg +med alists +chy na +bridesma id +ma ori +ro p +outra ged +in adequate +truck ers +al ana +ìĿ ¼ +ri x +oooo oooo +command ments +lam beth +aa j +eco friendly +bla z +morecam be +boun cy +rou x +rai ded +mi zed +sh c +gaw x +labor atories +ru bs +rest room +consult ations +ca jun +virgin i +so ir +rev ue +ple in +wag er +ç ¹ +we do +growing up +! ðŁĺĬ +face ted +sin ners +ho vering +ti ene +seas oning +an ja +leg go +il is +fla x +dev o +ash ram +mati sse +ker i +go wer +bo tox +mar shes +unh cr +ts m +opti mus +dun i +stu ffs +so k +order ly +n bad +islam ophobia +raviol i +fab er +cre ds +won ka +in fusion +over weight +daily news +assi mil +acol lege +medalli on +kili manjaro +sti ff +tham es +sun ken +th ard +my dubai +hilari ously +han nel +plu mber +fair view +separ ating +rasc al +qui en +necess ities +confeder ation +ll ll +: ] +weak nesses +bron co +ra ffles +el ot +ãĤ¸ ãĥ +advent calendar +ðŁİ ¹ +stra vel +tun ic +k su +im peach +e spionage +! 
- +di ment +cur rant +bio de +commu ting +by ron +ðŁĴĵ ðŁĴĵ +shad ed +tr uro +cray ons +ar ne +h sc +fre aked +dram ati +fle ek +u cd +marl borough +^ - +cross ings +mal o +black ops +bin ance +cho ked +chen ey +pl o +ge stures +val edic +ryan air +rem ington +v cs +mc kee +ec z +be gs +nail art +mayor of +happy fathersday +war t +pet itions +n ingly +clean energy +bro x +sl alom +exist ent +ab ay +ug liest +tom p +stom a +sel by +goal scorer +ben ji +overwhel mingly +lan s +semiconduc tor +south korea +re scheduled +sk yl +en listed +dow ski +si del +rosen berg +nas ser +white head +pri us +har are +en n +ry der +í Ĥ +mon g +clas ico +transpor ter +po tty +is me +** *** +vic e +sk it +ode ssa +l mp +her n +raci ally +pin oy +paragu ay +obitu ary +go es +bu cha +side walks +angu lar +un constitutional +transiti oning +i bu +gu ys +un packing +oooo oo +black girl +ber gs + ¯ +wordof theday +trump train +thunder bolt +m si +fasci sts +ठ¬ +t sk +collap ses +raje sh +loveis love +migr ating +set back +ðŁĺĬ âĿ¤ï¸ı +t els +safety first +nar rated +jae joong +un answered +lique ur +en nes +dal go +bill ings +salt water +mer maids +lon gs +clap ham +we arec +pic collage +n ach +h ace +pois oned +lo th +ag na +adel rey +guar dia +poli shing +peace keeping +d all +p isa +la pland +process ors +de andre +so bs +p once +dra ins +c be +ðŁİ¥ : +spla sh +meat ball +fon tana +worcester shirehour +ne v +bri sk +b int +ac r +po x +cay enne +skril lex +j fc +hahahaha hahaha +gla s +en gul +tempor al +oni zed +con cre +com pose +vibr ations +plant ers +fer t +criticalrole fanart +t bli +sch allenge +huck abee +munici pal +iam bic +radi os +ne vis +dura bility +mc cla +horse back +inst itutes +ful fill +atta ch +ate ur +ak an +resi sting +illumin ation +hand le +hair care +om ent +macle od +ka iser +g no +bear down +ly f +gl omer +distor tion +z m +san k +roo sters +is now +as ports +ag en +wo ken +st george +ro mper +my le +econom ists +ru to +t will +health and +d ito +ws l +tair p +pra kash +mic heal +h ts +w rights +kat su +fioren tina +defen seman +d itch +var sity +texan scheer +ba ham +sc anned +we il +seduc tive +ðŁijį ðŁı½ +fu e +er win +dav ison +ter ran +moo ds +wool f +re source +@ . +cu sh +ðŁį ° +regre ssion +cur led +la zer +jo anne +ab bott +mo z +down ers +mm mmmm +valent ina +k hair +dream t +cro ok +che k +ste aming +nephe ws +cl eric +as ober +indefin itely +w ye +us news +joy ce +flu shing +wynonna earp +ron do +kis s +hot dog +bar ns +sax ophon +far ley +gas p +decre asing +al way +pe x +l sd +shi ft +p outine +ra zz +rescu ing +ni ko +ho ch +cc l +u aap +n ts +m car +il wx +conqu ering +ket tering +stur dy +delay ing +sto k +vani shed +cath ar +bin gham +in v +ic hiro +he mo +budge ting +[... ] +be ss +sebasti an +slow ed +ðĿ ij +musli m +stun s +acton climate +ve a +se ton +rose tta +oun t +hard in +flu id +ca w +ðŁ¥ Ĥ +yach t +un l +sp hy +provoc ative +or ic +is back +__ _ +nicol as +gy an +loo se +fl in +reb ate +: :: +! 
"@ +com icon +she ff +down stream +chic hester +beach life +mom life +diabe te +ar ra +van e +ok u +ye o +man go +try out +app ell +he irs +arjun a +dd u +na veen +movi c +soci alists +s back +criteri on +soyu z +k her +da z +yol anda +wine oclock +re ina +one w +leon ard +en dez +u bs +support local +facilit ated +carameli zed +b pa +vuel ta +my tho +m ami +spe are +nbap layoffs +fe vre +nick jonas +im print +c so +craig slist +la salle +gi deon +ha doop +dis regard +w ud +tu c +ma gee +acou stics +ta a +qui e +pol a +cr t +dw yer +dis sec +capit ol +men tion +kn oll +he igh +fin ders +plac ements +l se +indi ra +gur i +madhuri dixit +kingdom s +iambic pent +geor gina +je ky +conflic ting +bay an +aga tha +uph old +dr on +vic ar +ex pat +periph eral +pe ssi +fa f +ance stor +? .. +wid get +pun c +comm enced +beav s +air waves +ad dis +po a +de sses +co den +vu e +ru pee +kar in +spo ck +m sy +ภ° +pr ick +fill more +ti fication +thing sto +sar de +em ile +pere ira +n ad +bright ening +arre sting +wo king +usc g +sp ill +raspberry pi +hu go +ite c +is ma +cuff links +optimi zed +oc c +mi wx +en ka +el ited +afford able +sa kh +coron ado +ho h +at ul +ai oli +jim cantore +accoun ted +vin ay +her mit +groo ves +ran ch +r illa +we tter +ou tof +veter in +ni kov +ki an +fair banks +ram apho +n iti +k ko +ru sty +ne stle +tv xq +shahe er +âĿ¤âĿ¤ âĿ¤âĿ¤ +penn ant +gem stones +dem debate +ðŁIJ Ĭ +auton ews +support indiefilm +mach o +ve x +new sat +ne ti +conce ssions +can died +yof the +mac au +den ds +cricke ters +san iti +mari ano +gh at +ar toftheday +¡ ľ +e gos +gen oa +chat bots +bri er +al labout +mon ty +spi ed +r tr +comfor t +sni ppets +real time +gra in +exam ined +en lightening +tt u +god bless +release the +sing ular +ki ans +ha ka +sor ren +defe ct +mar g +equ ities +d orian +su ka +per l +aishwar ya +pul lover +preci sion +fair way +ne ve +rive ting +vill anova +en com +ak o +passion ately +europale ague +siem pre +x vi +enligh tened +c fr +âĺħâĺħ âĺħâĺħ +wast eland +is f +new comers +emergen cy +amphi theatre +- . 
+text books +figur ative +tre mb +pe sc +ab hin +ab bot +ac acia +har ds +por sche +kau ai +el isa +car rick +abo u +elli er +be ch +neu tron +galap agos +ru ben +in nis +how to +nun s +sab ine +i ac +clin ched +no tori +fi ves +cairn gor +per i +gr c +ðŁĴ¯ ðŁĴ¯ +mal m +twelf th +di ff +rout ines +marty n +lin den +synthesi zer +nu mber +game cube +fal kirk +byz antine +queu ing +gr ill +scal able +char red +rou ting +her bali +gri zz +ðŁĺŃðŁĺŃ ðŁĺŃ +tol l +termin als +l pc +ab d +war mups +remo vable +¯ \ +vi go +pap aya +ne ve +lov ingly +jo kers +ib les +sse tt +poten ti +pel e +gi gi +sadi q +leg acy +son o +ru pees +retar ded +ele e +par r +fi ance +ey re +say ers +pend ants +mak nae +al bans +adap ting +p ff +pu berty +ji u +ing rad +hypocr ite +diplom ats +phys ical +rob by +bon sai +ãģ · +f att +catal unya +âľ ĸï¸ı +ro ma +more land +so e +conver sions +stl blues +shol m +gra ssy +pra do +on u +assaul ting +> _ +sett es +dis graceful +aph ra +âļ½ï¸ı âļ½ï¸ı +ठª +kil n +goal tender +s ru +philanthro pist +b als +th n +stu den +sando val +dogre scue +eli ons +asse ssed +lar go +hec tares +sh rm +sa if +cle avage +no ches +n ene +fat alities +cur ing +clean ser +al es +p vp +south bank +pizz eria +marsh als +kni fe +an dover +tbli ghtning +sr sly +ou te +digi mon +timesof india +prome the +le bo +f su +wit z +rever e +man as +mam ba +ch ica +gu an +exhibit or +csr racing +d ere +xx xxx +gu sta +story time +ston ey +organ ics +and u +se am +min ogue +anushka sharma +ab a +ðŁİĻ ï¸ı +ugand an +chro matic +as sn +document aries +sh t +ru paul +loy d +k ats +e us +ite ch +me dusa +pan ty +kel logg +et to +talla de +sha a +do st +p ms +mari ana +je ster +croo ks +ðŁĶ ¬ +min danao +ind hoven +ðŁ¤ ª +le xi +tv n +jan is +co te +ãģ Ĩ +ser rano +iw m +ðŁIJ ¬ +k ke +distribu tors +cap u +counterfe it +camp site +ag gie +ðŁĺ ¼ +chhat tisgarh +~ @ +state u +san di +prevent able +cl s +can ne +mm c +i ver +sa haran +pal is +night out +do s +ap ia +absc bn +manag erial +aro se +mo wx +aro sa +ðŁĮ ³ +under dog +remo ver +astronom ers +lent ils +su scep +smoo ther +pend leton +fau cet +e mory +dal mati +af cb +tic us +exem pt +en rol +d heim +ðŁIJ º +restric tion +star fish +sto w +snor kel +thunder birds +she ad +homo sexual +dy n +as li +andre tti +dou che +dom o +tar mac +slu mber +pr onto +first dayof +mini ature +mari achi +argu s +recomm ending +mobi les +in ce +illustri ous +or c +adver ts +gr its +wea sel +pag oda +over pass +gre ys +maxi mus +arma gh +wood land +sun ni +ðŁĴ ī +ë Ŀ +ti one +soci o +ho s +ðŁ¤Ĺ ðŁ¤Ĺ +wind sor +subsequ ent +munch ies +id h +exclu ding +e mi +cu th +z ai +week days +law suits +barn ard +Ø ª +pe tting +net es +mul ligan +pharmac ists +ra quel +e ton +cran ston +gil ded +cle ary +ce ph +ra a +pam per +lombar di +as in +sher ry +pro d +for te +ari anism +buffalob ills +æľ ¬ +ðŁĶ¥ # +uu u +just ices +car ina +nat in +mas low +dro oling +cog nac +cam ber +el ong +r dr +in en +convic tions +am use +tro ck +harm less +visit ation +gen omic +bl and +beno it +chim p +tuscal oosa +gre asy +x po +gil t +se q +per mitted +christma seve +book s +mu e +old school +human right +be ati +ðŁĶ Ŀ +sh at +sculp ting +h wan +fern andes +sci utto +fu entes +endeav ors +maid stone +un paralleled +shou ted +queen of +mer c +band ic +ve da +sel angor +pi le +ja han +intimid ating +disapp ears +cl ich +za ha +w urst +hi v +fod ils +cor dless +aaaa aa +hy dra +bel inda +e els +bu f +su staining +rugby league +no c +brig itte +( ðŁĵ¸: +tromb one +soo the +smo g +ad p +stab le +ing ley +diagno 
se +ms g +we ss +tic keting +one e +nsw pol +e up +auto psy +adity anath +sun down +river front +si ya +p is +hier archy +dur ango +di jk +ren shaw +he aps +epide mi +david bowie +interne tof +dd i +nation ality +mb ar +air y +win der +w alia +elli ott +c x +bav arian +pl att +an tw +wi wx +sof ter +ne ha +h eller +th and +dani ela +bo ast +degra dation +ðŁĴ¦ ðŁĴ¦ +transform ing +man e +av ut +ðŁĺĪ ðŁĺĪ +vo ter +the e +t ate +pu ff +in door +sop roud +boy ce +boris johnson +wait in +immun ology +ðŁıĨðŁıĨ ðŁıĨ +âĿ Į +street food +liz asober +cavali er +c elia +need le +motor ing +g ato +, ) +ra de +harve st +t ms +jar pad +on ey +air men +v re +impair ment +abhi shek +snoo p +l ant +fam ously +bl ou +s ze +g ander +un touch +tu f +dee jay +col lateral +b ind +ðŁļ © +pin ning +ic n +' ; +the economist +ul tram +worldwater day +ti poff +the i +feed ers +campa ign +sc umb +day weekend +yo m +pe dic +h ough +ps v +pl in +on de +boston marathon +az zy +* _* +con ley +thi ago +hoo o +gal erie +luci d +je tt +gl itz +final fantasy +achiev ers +y ung +peregr ine +op hi +dam es +biom ar +âĺĢï¸ı âĺĢï¸ı +sk c +l ics +fl ank +ar rahman +ho of +uphol stery +t ats +wo z + ¿ +snor ing +ra er +l ju +ap d +pl ating +kan u +im ation +fragr ances +m ra +mor ay +mo tt +im muni +hearti es +bho pal +tim ers +g ata +color way +car nation +win get +si ghs +s ville +optimi st +chate au +olympi ans +ci o +singer songwriter +ny o +fi bers +bur ch +ag ro +mil ne +ig bo +cr amer +ation als +dan ube +pad ma +nor mani +en forced +bre ck +boeh ner +ar den +sur rendered +pros thetic +om a +ha iled +calcul ations +w fa +bi b +fcb live +fon da +west coast +que sts +friend ly +to wie +fit ch +bal ot +star dom +scrat ching +ho sa +thi ka +o ven +stro ke +out post +pharmaceu ticals +hi kari +mu y +af d +fallon tonight +squ at +or u +dra ined +chocol at +ë¯ ¼ +wor ths +ri b +mu j +that s +residen te +it el +boo st +mi gos +mul led +la a +etsy shop +don keys +me k +p tc +flin ders +e hs +ro hit +mu ir +g ad +compos itions +åĨ Ļ +combu stion +i kh +yemen i +wav ed +gar ci +ak os +oo ds +fu sion +se que +s lan +pl ur +kic chasu +shenan do +s ams +worl den +horo witz +with me +mic robes +k ki +ðŁĴĶ ðŁĴĶ +w su +patch work +fre er +y aki +the art +symboli sm +mil er +bt n +ma bu +side kick +motiv ates +sag itt +natur als +serv iced +ps ori +pa ola +qu ig +i badan +gi ggs +ë ³ +sciento logy +si oux +salam at +d res +cad bury +d hawan +ci ón +_ ' +swa pping +maris ka +james bond +explo sives +ay les +af er +s agu +cen sor +tom a +jeff erson +ring ed +par tist +ir responsible +aguil ar +vac ay +equ itable +altrin cham +ac ur +man ish +ger min +schoo led +pu tter +ed ad +nav al +toast y +sol areclipse +dish u +coy ne +ac co +mu ck +mar an +el os +len der +cro ix +worth less +ha ber +gun men +ðŁį ĵ +zen ith +t enders +hur st +hol tz +itali ans +car low +u cd +characteri stic +bun g +av l +u th +sa sia +rs l +red man +neighbor ing +green peace +sti ps +follow party +y gk +en os +omni bus +na issance +chri ssy +secu re +call back +ji hoon +memor y +block er +l anta +daf fodils +bil t +ffer ty +fau st +ie c +nipp les +so g +m nd +jagu ar +bol dly +ab poli +pro position +gun sense +evan sville +cu tters +we go +dou n +do x +stal lions +ka j +shi ppers +j awa +vol o +le ven +pap rika +kov ich +jor di +induc tees +app alling +dial ysis +allevi ate +âĢĶ âĢĶ +pie ter +mid wi +q tr +juli ette +inter mission +haw ks +act ment +one ill +k lin +vam ps +fam ous +cou ld +autom obi +da an +west end +elli p +nh c +mel anch +web series +ton gue +snat ched 
+smy th +tan gible +sl i +e asing +bar stool +over lay +afford ability +ting ed +ter as +ay ush +wanna one +rh ine +dan a +sh ana +kend al +fer tile +w ir +repl eni +lar vae +is ro +con vos +ab brevi +u cc +hun gry +bur rows +ag er +nav i +mat in +du per +cer n +ma don +ķ ï¸ı +é ģ +tu ps +hy att +sh ep +friday night +wis er +hei di +hat ton +p gh +foun tain +wrist bands +ahmadi yya +aeri al +subscri bed +so los +m ace +sla yed +for fe +dul ce +christ mass +arun jaitley +viol ate +ob stru +ni eces +w vu +idy l +fa ze +pre serves +infr inge +premi ers +inter vals +agen cy +( © +stand alone +di mes +bo er +param eters +ge tit +ðŁĺĺðŁĺĺ ðŁĺĺðŁĺĺ +tu lane +for given +scol l +mb ps +smash bros +rob bi +prima vera +ali st +ghost ly +ay at +ye ats +impre ssionist +ear phones +caul field +wai kiki +sal ute +sc ou +mu ay +louis vuitton +bak hta +ado g +inven tions +hur d +forec lo +stream line +thalai var +ch snews +will ard +t sn +euro parl +cru sher +my sore +gro wer +ra ping +pat ti +g den +sm w +muf ti +kid man +ab r +soun ders +skep tical +ðŁĶ İ +sun dar +i me +fer g +feather weight +ar lington +pas qu +ag azine +wearab le +nati c +mccl ure +inter mitt +hor de +six ties +car te +bha v +ze al +experi ential +ador ned +som mer +eno te +hypo thesis +stin ky +pro to +dead lines +vo gel +mus ings +monc ton +gu ter +f le +aci on +voice of +ta sha +inhabit ants +type face +s ba +bts x +ðŁĶ Ĵ +wor x +u hc +jo ko +cell ars +gor o +continu um +... & +weather cee +ha p +sr k +ris ers +lonely planet +un named +co eur +ðŁį Į +the world +ili ke +fa sten +ami go +ri ba +ramapho sa +staf fers +had ley +? ?" +fi ore +sal ut +hu ff +bez os +Ñ ĭ +ra der +kam ala +in line +fill ers +um atic +all in +shat ter +re in +o ku +ch ases +fla gged +baby metal +water stones +ts b +cut out +op hel +aam a +rockab illy +sto lic +jet blue +ich ick +down ton +uzbe kistan +pat na +la q +gr ange +) _/ +subsi di +sc p +newsc ast +it sa +twee tyour +e mor +archae ologists +uni fication +por ta +q x +protec tors +pro hib +charis ma +car tag +ren fre +scul pt +guwa hati +de ma +boo p +unf pa +dex ter +lay la +alleg es +sou ps +never again +l ys +cal c +bar oness +visu alize +ger ber +absor bed +i ers +a han +fon tein +detec tors +verst appen +sv c +formul ated +ac dc +li x +in competent +bh k +lour des +water house +snow ed +appreci ative +sig ma +lizasober ano +pen ned +pay check +tall inn +fanc afe +par isi +av alley +vi g +ru fc +hard ship +so cute +po ise +ì ¹ +roth schild +k ly +???? ???? +l hp +il ay +f hs +am ad +ide als +brad bury +bal boa +nic ot +kid nap +wol ve +tas manian +op t +matthi as +ãĥ³ ãĤ +super markets +mylittle pony +me lee +li ster +gr oun +fe dora +kind ness +en en +bra hms +¯\ _( +ros well +mar lene +ic u +re formation +or ail +he brides +dispar ities +terrac otta +swal lows +re id +influ encing +flu or +den e +tum our +blon des +thunder bird +sh eva +moga dishu +ka b +cre eps +i ving +ene ed +anno y +âĶ Ģ +intri gue +enqu iry +ar aj +tur al +kuber netes +end lessly +divi dends +tor a +ti sh +commemor ates +un ra +tri b +pon ty +ne m +diss ent +brew ingco +ðŁĺ ½ +nor mali +bi of +( ... 
+chil len +ì£ ¼ +mell on +av is +mccor mack +ing ra +enrich ed +custome rexperience +testo sterone +snu g +sett i +ger onimo +inqui rer +bre aches +very thing +bloom ing +mu ra +dispo s +bi de +de va +shade sof +in trin +sh ev +s ven +nayanth ara +gan esha +c ws +ber ta +label led +use um +nick named +ma han +car uso +ap ur +ðŁij Ĩ +w q +orphan age +discar ded +mag nu +lu e +je on +bridge port +pac ing +mercur y +( ðŁĵ¸ +marx ist +amphi bious +transplant ation +stit ching +then burg +gradu al +ãĤ Į +ro ft +ma ils +ine c +guy ana +dopp elg +ver o +re write +head less +harb augh +gate way +car sforsale +sw i +st is +mach t +un de +sura baya +stap leton +nur turing +mil ner +ya o +lma oooo +ko sh +arsen al +k ame +er ry +ar royo +dis misses +ru bbed +rc b +lew d +dil u +and or +vi de +ur in +inter sec +ha ar +al b +year swith +app leton +é al +ul livan +suc cu +monter rey +d mx +artem is +ron nie +farm land +s football +gro tto +anth i +ãĢ ģ +à® Ł +vid ya +jimmy fallon +ൠį +t zer +gravit ational +w thr +u hhh +e hr +tin ker +ti juana +scran ton +ram charan +bar clay +re van +m si +ka p +wr s +we thenorth +tor al +sat u +gro m +fac ep +erick son +z yn +se dge +oo dle +spur sofficial +ds p +sic ilian +soli hull +recei vers +ladak h +hend rick +ther i +presi ding +mc guinness +litt ers +gun nar +gh oul +wi b +n tv +kar o +fro ck +b lau +ampli fy +all is +ul lah +memo irs +kh loe +intercep tions +pet day +lo oney +con fin +ch ay +piyush goyal +frequ encies +ut z +event ual +warm ly +obli vion +an ka +ta it +âĿ¤ï¸ı . +director ial +ru lers +prince s +mu ck +stur ridge +deu ce +abri dged +bagu ette +un cles +pen du +min ding +forre ster +av ila +wall er +wall street +ment or +hin o +high way +crom well +fanart friday +mb i +co yle +a hi +tro ve +spie gel +pay tm +mcin tosh +jan sen +nit i +nash ville +len o +leicester shire +le gos +dic t +ðŁĵ ½ +sp ad +beverly hills +sy rah +separ ates +z ain +un fit +dra gs +tan ia +over flowing +hri thik +haw thorn +z ani +mac far +fi de +to tem +pe ds +fundament ally +cal ico +sin ner +j ä +hil de +ds d +ten ay +ta hit +mil f +lie b +inform ing +up lift +ra el +mortg ages +lec t +ii ii +guillau me +compos ites +old smobile +l end +gar th +com mish +bapti zed +scorpi ons +ru cker +bringback our +alli ance +thalap athy +tal i +sp ans +eri dge +wither spoon +lin da +sky lar +kor n +hom s +Ä į +sil enced +caf fe +ar ty +dist inguish +to wed +pun g +jessic a +ear nest +beau fort +t ama +study abroad +si khs +new bie +nav ratri +mar ble +loun ging +lit ter +dal it +so sa +iz es +gra de +com promising +tr iton +de tta +v j +chau ffe +spec tral +powe red +montess ori +artic ulate +hal ton +al co +ye y +mn twins +acoun ty +ðŁijı ðŁı¾ +âī Ī +mad men +kal a +gru m +chi k +ati s +su me +akh tar +job search +high lighter +bo ath +âĦ ¹ +tar zan +lam bo +âĽĦ ï¸ı +ox fam +dump ster +pretz els +mac os +incl ined +fac tual +adverti sers +shu i +pu ree +ml pfi +anti dote +cap o +pa str +merc ado +but ton +ar min +ag g +lol la +horri bly +er rands +christop he +time snow +monday motiv +li ss +scand als +mc i +dispropor tion +âĺ İ +sur pass +samar itan +so tho +pu rest +fl att +trivi atuesday +delec table +leop old +hermi one +chou dhary +en rich +¡ ¡ +subsi diary +ine qualities +bachel or +auto immune +la kota +i hop +ad jec +the simpsons +sh es +se k +gret chen +up stream +hin akhan +coper nic +x tina +lu g +tough ness +e ad +cli pped +bi us +sl v +fah ren +dee pak +ca u +x an +im mature +dig ni +bo bs +shred ding +but tery +accommod ations +de ven +chun ks +super league +sky bet 
+kil dare +je et +ë į +ce k +wrec ks +pro pane +oh l +tb d +quo i +trum pp +mi mo +reluct ant +ver ne +o ic +ma gh +ar nau +se ver +li dge +stair way +kicchasu deep +ðŁĶ º +mach ining +aama admi +ot i +c da +al it +pan y +inst alls +ac ct +e shop +di em +hard well +fulfill ment +sc afe +qu ack +extrac ts +swee tened +fi ghton +f di +d inger +wal tham +us ur +refe rees +seok jin +gran n +af rin +th n +sch af +par cels +bet is +amar ine +nom an +kh tar +mor itz +cou pling +bar ons +ðŁIJ ¸ +à ¸ +sl p +sad ler +x ander +tri ad +mc millan +kh z +divi ding +ìĹijìĨ Į +dar yl +zed d +le ys +pla ques +flu ori +tipper ary +on nell +di dier +lang ford +im c +the sun +bir dies +ar cha +ye ssss +t di +dar ia +cand ace +al tam +pal aces +ch it +sant am +event ful +book of +ad b +mon stax +cre ole +co el +âĸ ½ +we aren +sten nis +she ath +ati sm +gron ingen +mlpfi m +le pre +wrong ly +rsp ca +rendez vous +acknowle dging +pel vic +solic itor +sla ys +nue stra +lo d +is lander +fer oci +fashion show +ra ss +dge on +adole scents +sma shes +negli gence +grate ful +ved ere +sw oop +ing l +apol ice +vand alism +gan n +jo ao +di supdates +zimbab we +under age +radi ance +w of +bour geo +pla s +cr ani +gh ue +wrec kem +warran ts +re form +jim mie +at wood +ys l +neil himself +l bj +i man +tan to +nois se +ver bs +equip o +al together +mam ent +l ice +dou glass +tier ney +pri med +j hal +furn itu +braz ili +v ill +past els +n ison +u ff +paral ysis +jay e +im po +ðŁij ģ +strate gically +pakistan is +was sup +super bike +thank u +tru elove +sha ikh +israel is +vi p +to g +li en +la ker +grey hounds +cul ars +bian chi +balot elli +ar ran +loo s +str ates +he bron +ar vo +sunder land +the al +tomb stone +sand man +c pac +thanks giving +love him +lat ino +an in +aka if +ĭ ãĤ +tor quay +di est +alli anz +ðŁĺ ķ +golf club +cl lr +wal cott +sch nau +promp ted +nomin ating +len nox +val et +mon ro +may ward +e ph +ðŁĶ Ķ +inter oper +r da +re flex +arm chair +ê° ķ +stri pper +por ti +ph arm +ham za +ni reland +ne ue +h pv +port foli +sun burn +fris bee +be al +bapti ste +x h +ty m +pr ati +o vers +haz rat +deser t +der ry +us ky +em mett +ach arya +)_/ ¯ +shu d +may a +ham ill +ra im +nr c +fitt ings +cur vy +ðŁı ĩ +ster ling +ॠĢ +wal kin +short cuts +mil ly +ast ur +alpha be +pl i +pe z +miss you +rad ford +ml g +ta eyang +notjust lakes +du mps +seren dip +le ur +ra ving +e ster +de priv +absc bn +ðŁijĩ ðŁı» +scar city +o cr +mean ings +cap t +da hl +fer mentation +bri oche +to win +out lander +massi mo +en cro +ðŁ¥ ³ +buil t +po tam +kir i +tm w +monit ored +k ites +peoples vote +gray son +íģ ¬ +afri ka +a dies +i vote +gy ne +g annon +di x +c mc +ou ral +fox andfriends +bel i +ig ne +gl an +katrin akaif +co politics +qual itative +p si +lu cci +disc oura +âĺ ® +kel li +gau tam +carac as +reale st +pu la +in us +hill top +make aw +atten borough +tw y +r arity +peck ham +ma hon +corn elius +clin icians +ton line +tb i +paradi se +ka si +inev it +fresh ness +colling wood +lun atic +defen se +cop d +in fra +wain wright +sains bury +alab am +te ma +lac o +chec ker +releg ated +tren t +stal ks +huff post +bhubanes war +ast ral +share your +prim rose +hi me +cat an +end ment +en dow +cle mens +mal oney +hil ary +game time +den ise +collabor ators +b wo +radic als +gue tta +ici on +au a +snap matic +sat chel +excav ation +base man +s ão +gn ation +fel d +surve y +shah zad +ma st +anirud hofficial +tru cker +ot ago +geo graph +ethe l +âļ¡ï¸ı âļ¡ï¸ı +s ver +mu tt +internetof things +ancho red +wh ouse +bang la +bal main +ç¹ ĭãģ 
+break fa +á Ģ +twi ster +te tris +ca v +stag s +g z +au b +stor med +hel ens +yar mouth +st asy +gustav o +co sc +vin son +up p +sc ricket +assump tions +app e +nu h +u er +pre mise +n aga +e amon +coron ary +na f +north side +el mer +ro tar +out lining +el f +re surg +kat elyn +in can +hyster ia +ce e +am bani +pro lly +Į ãĤĬãģ +ax es +san jose +rem brandt +mag pie +even ly +scor sese +qu aint +f g +b buk +indian football +weare all +spd wy +pis ces +ec g +âĺħâĺħâĺħâĺħ âĺħ +pre orders +: | +ni pple +sal azar +ju me +jail break +min n +bas sett +ze tta +jef free +ad jun +tic on +san diego +drink local +chol era +solic itors +o bo +com post +ni an +wr a +tre ach +ic ic +profession al +del ve +leg ate +histor ia +cro issant +con noisse +nam o +palli ative +chem trails +i ority +global warming +comic art +behavi oural +re sted +li as +cli mates +Ł ãģĦ +rut land +nou rish +menopau se +hot ties +demen ti +ve spa +mel ville +anal ogue +tz man +str ung +im perfect +gl are +cir cling +ros berg +rec o +oc ity +lo ire +em be +do ssier +ne el +nan do +me a +gal vani +fin esse +ag p +berke ley +asi m +âĺº âĺº +quil ted +ish ere +un matched +po tion +for z +at re +selfi es +juli ana +ðŁļ ¶ +âĸ º +mel ton +âłĢâłĢâłĢâłĢ âłĢâłĢâłĢâłĢ +spin rilla +pur cell +ed p +at leti +tony awards +ra ja +pro gno +mol ten +stu ff +p ally +nobel prize +âĻ» ï¸ı +spiritu al +spe ake +sa sha +bri um +tru ss +critici ze +assassinscre ed +yor uba +u lo +fire man +workin progress +ef cc +fla res +ro bot +hi kers +cl l +shado wing +pat sy +leh man +c ns +å ± +guad al +à± į +ra pe +r honda +paralle ls +son ja +langu age +land ings +z ola +cr amps +bur ning +apprais al +jol la +ham m +kas a +gul ly +f go +uly sses +ri be +ðŁĴ Ħ +ib u +eti enne +bri ar +fin ely +comb ating +y ql +go tham +we chat +to paz +primar ies +l se +iz z +hel e +dispon ible +cy stic +bel ichick +th rush +kansas city +ge om +soli di +red bubble +by stand +cambridge shire +par fait +ast le +ow o +ind ore +stom ping +sm elly +ðŁ¤ ĸ +locom o +adm itting +hol me +clock wise +min sk +mc co +for get +ev p +cam ra +ab ella +yo tes +universit yof +mé xico +silver ado +ric ket +crom bie +pu j +eradic ate +deli ght +y go +glam ping +vic a +du ggan +coun ters +cf d +sc our +react js +pu ram +paras ites +in ki +vill en +stel la +li mbo +ang as +k cr +ðŁĴļðŁĴļ ðŁĴļ +vap ori +mum ford +oli gar +à ¼ +al oo +boo ties +ad r +k elli +dru mmers +av ici +nature uk +ron al +in trac +un splash +le che +g oma +el ine +envir o +bi onic +bu eno +mi k +av in +star ling +em powers +cake day +boy cot +ðŁĴļ ðŁĴļ +ðŁĮ¸ ðŁĮ¸ +v ach +m ci +fractu res +ger i +sk ing +exclu ded +lu ce +ja ve +ig gy +evi den +aki stan +a wn +mor als +luci fer +ha ban +tumb ling +sunday motivation +mo sley +captain america +sch icago +the one +mo td +d ts +ðŁIJ ¼ +rep ell +ii i +locu st +geo spatial +mer sey +immer se +desc end +ber nade +j s +boat sales +win der +cran k +sing leton +candid acy +ben a +ðŁı» âĢį +high lander +ol t +k prs +healthy lifestyle +four teen +end the +ith aca +circul ated +r ans +pre valent +ha vas +splend or +roo ster +kalamaz oo +jewell ers +enne dy +rou sey +es y +cann ons +ornam ental +// // +ren don +win ne +mol ding +eid mubarak +coun tess +simon a +ha wa +fo es +du ster +sb u +por tray +mar ries +goo dday +cho co +achi ever +ðŁĺ¹ ðŁĺ¹ +pre neur +tr amp +tom i +n bat +garden chat +farra khan +ever glades +ab ru +sou sa +se ce +homes wee +terre strial +bar it +sri devi +ol u +mel inda +f rick +can dies +ðŁĺŃ ðŁĴķ +qu reshi +family fun +exor cist +cardin al +ny t +dies el +cu 
mulus +capric orn +si ology +lor na +dou gie +an die +super sport +c fl +п ÑĢи +say ang +pe ek +ภĬ +lo be +j em +ing lis +gg led +c sn +amne sty +chu ps +ba es +sau er +ðŁı IJ +mongo lian +en et +back street +dr illed +acce ssing +ce o +b se +ai ken +pur r +wor sen +whe res +war k +testi fying +bu ri +bla st +aw g +ðŁĵ ĭ +re defining +hear ing +u ci +c mp +bon i +tail oring +ta ji +noc chi +em t +stephen king +ne et +compla ins +campaig ner +luci ano +twili ght +ti esto +pas sports +flo yd +cathe dr +na ked +caregi ver +b coz +ade cides +ku ri +ly k +br aries +dren ched +disc lose +ðŁĴª ðŁı½ +le blanc +je tty +gar ty +chip mun +b su +rhyth mic +ic z +fri d +anne x +ame x +solo ist +lanc ers +arro whead +speci fication +simul ated +na is +inver te +bo wing +wor ship +f z +abo ss +sha q +ì¶ ķ +challeng ers +an arch +aamaadmi party +ãħĭãħĭ ãħĭ +suffol k +so corro +sn ell +cla dding +absor bing +shaw a +particip ates +ðŁį Ķ +book stores +bak u +seap ort +ko jima +gab y +pack ard +electr ician +let it +mo wing +fa wad +young jae +hot mail +men ing +u rie +intim acy +con ti +: ") +lifeis good +in ciner +i dri +craz iness +jour nos +fran chi +bott len +al da +ff es +k x +south we +air a +clay ton +sco ti +f j +bri ga +ðŁ¤ĺ ðŁı» +demonstr ators +y z +stor k +na q +casc ades +travel chat +plat a +pad ma +fran ci +at tain +bat girl +lom bard +hoo s +d dos +neon atal +discla imer +r ss +r ant +di sen +tex aste +so cal +frac tal +cam ry +stri fe +sn acking +mu h +sant ander +mor ons +gra f +par ades +hu ston +dru pal +mi ento +kir stel +hy de +vom it +forti fied +sphin x +da v +bir yani +win nings +s baseball +mer ged +lovel ondon +ling ering +dream big +car leton +liveli hood +djan go +astri d +gri ds +down e +bru ised +s ne +scarec row +hel ium +f nc +bi ggs +an ter +restor ative +em pires +ab del +life style +kiwan is +colloqui um +me en +pr ick +anti que +ze b +mi mic +edmon ds +ðŁijĬ ðŁijĬ +q ing +pp el +mc gill +interpre ting +âŀ ķ +rash ad +do ka +narr ator +electro magnetic +ash by +sau ra +iran deal +âģ īï¸ı +krish nan +in di +ff en +bre a +os man +multin ational +chi ppe +recruit ers +aus biz +p ounding +re gen +cur sor +refu sal +mac s +in ak +ax ial +wa ifu +up cycled +hindu stan +cas sini +carly le +scrat ches +re ef +man atee +eat ery +ðŁĵ ¢ +un condition +sen pai +on ther +comic book +pro sciutto +de mar +mi se +ma ge +fre ec +aye sha +al der +android games +ley ton +ho ck +door way +chicagof ire +aali yah +sw elling +bi x +. 
ðŁĺĤ +evan kirstel +torpe do +kon stant +genevie ve +ma ia +ha user +do torg +hide ous +fi k +sp raw +e ek +z appa +wan dered +' ' +ra jan +bam bi +( $) +wid ening +tool box +sa ir +illumin ating +pra ys +out patient +i w +day o +lo b +sw fl +sha des +gu ms +coo kin +ko di +gri ffin +traum ati +ste a +slaugh tered +god bless +air time +pseu do +b sa +hau led +ar if +à¸Ńภĩ +le l +wc po +mil iti +char ters +worl da +ru k +k gs +digital india +is able +idyl lic +esp ino +marie tta +e bo +team canada +ab our +wil ton +rock stars +fav ored +phys ic +wrink le +tb r +d print +ball arat +ad al +z ey +ðŁĺį ðŁĶ¥ +tom lin +mt r +pal sy +fener bah +tight en +phil ia +ir oning +ry u +b ant +enqu ire +ca ir +abur ger +tru n +green berg +chau han +ir ina +sh ani +trend setter +pre tt +zaf ar +alo ve +v ici +pan ic +no o +lu stre +disrup ted +bal lis +son sof +mon si +inst ac +ake st +ëĭ ¤ +kw ame +horror movies +distric t +sau cy +mb an +ar mies +with drawn +med ics +loft us +er oom +be kind +ar ns +all on +un ison +davi ds +cr at +nicot ine +so or +sm x +on co +cospla ying +zombi es +har ms +e ger +ro sy +moon shine +fe in +ce tt +du brov +reg ents +ben itez +ðŁijıðŁı¼ ðŁijıðŁı¼ +ste c +m alia +prioriti ze +ic eland +ft se +v amo +lam ont +homo sexuality +bre es +regu i +cb p +te j +sky sports +deter gent +sha sta +de rel +conserv ancy +colori zed +accol ades +vis o +show your +nan ow +bice ps +us ability +bi m +dailys ketch +pearl jam +stran gest +mega deth +broad casts +bar ren +ar ton +chri ss +confi gu +lu res +is the +e ul +railway ana +global health +gi anni +u aap +s lum +consci ously +ab re +n up +bud get +v ada +e sch +real ness +er ased +th unt +be z +armist ice +ðŁij ¹ +sh run +o led +driver less +ðŁ¤· ðŁı»âĢįâĻĢï¸ı +won dr +sk an +sal aam +mother land +h wang +gen o +gang nam +tw right +endor sing +en ic +ador ation +pau sed +patric ks +do cked +plat te +ff xv +ethnic ity +auto show +side show +after life +re located +orphan ed +food network +dare to +and ra +sla ps +v live +swim s +re imagined +mist le +re vise +real ity +bhar ti +ðŁĴĻ ðŁĴĽ +late st +prou dest +gra sses +lan yard +fresh est +carcin oma +anom aly +zieg ler +sum ner +ly rix +gor g +is d +av el +swild life +me squ +john cena +euro league +sab er +master ful +yar ra +cogn ition +jacob son +abo lic +sir loin +shuk la +moj ito +su pere +st weet +me z +e sa +rudol f +gur a +where you +tt m +win s +trust worthy +ny k +bra den +table top +good food +es on +be k +lingui stic +gra ys +ch ath +h cs +mon i +de ans +cu ssions +ch ell +slo ws +he mi +d app +shar pie +boo sters +a os +str ack +se dona +mu eller +hard wick +or nate +thor a +sal ud +o twol +ch um +mi ho +for age +thel ittle +tear ful +ones elf +min dy +sm g +gmb h +emer ald +ðŁĶ´ âļªï¸ı +tu tti +recep tions +re vising +i brox +tope ka +sal ami +expan se +i books +dob son +cli o +at s +ðŁļ Į +mo ha +is ance +shu tters +moo t +jan ine +marvel comics +jor dani +pos er +kenne th +hy ung +de ja +ase ball +speci ality +eu ston +classic car +had ith +ðŁIJ ī +chas ing +iz o +gros ven +ag lia +thisdayin history +t row +om ile +hu ar +by n +sal ine +div ine +demon ic +ty ran +han dover +revit alization +pa ella +cryp tic +se dg +m end +dun kirk +bre d +wal d +sport scar +a ard +whe aton +da ener +k lan +br t +bakhta war +spi res +schu bert +ro ti +poli sh +o se +ag ame +wonder con +prote stant +bo sa +ðŁĺ Ł +d ü +joy ride +ger trude +âĿ Ŀ +gil a +v h +tw a +tra v +swal lowed +star ve +la in +ent ren +rei ki +su kh +cra ic +az u +web page +kee fe +hypo the +hir sch +hel le +camp ground 
+w amy +tra vi +sha hi +san deep +ru i +han uman +dw p +reposit ory +no or +no ff +un real +p ell +black history +har vick +ma scar +pay ee +pa sha +gastron omy +d ÃŃ +ai g +rosen thal +open day +embelli shed +t tip +sun bathing +go pack +end ome +ï¸ı # +invali d +final four +st fu +squish y +ra sta +mo sch +jam esc +die trich +sel a +mel b +el vi +t dp +sun i +sli t +j ha +bi za +spi ked +l li +l illard +vam pi +syno psis +az har +kendrick lamar +ĮãĤĬãģ ŁãģĦ +heart less +country file +air play +arrog ance +pre e +virtu oso +ãħłãħł ãħłãħł +raj u +le bu +for ward +tu g +dro s +mondaymotiv aton +concep cion +thel o +pad i +looo ol +ÑĢ од +it ss +eth ical +end uro +__ : +expend iture +mon ste +mas king +terri ers +ib is +e mber +cu mple +punctu ation +pi per +ir vin +ade e +yy yyyy +flash backs +cel sius +don nie +bo gota +ben evol +the script +shil pa +pro se +fin dia +ze ke +ne ko +do ves +blues lyrix +fro sh +sowe to +mp lo +al ai +sab i +raq qa +wf tv +stro ller +ian somerhalder +ðŁĶ ª +an on +mo seley +! ?!? +sta king +mol y +car tri +c sg +ast or +transc end +ma er +de ux +cow girl +sas k +pun ter +ma ken +o ates +love tt +grow ler +sag in +v n +ssi ble +officeof rg +y mc +sab ar +faul ty +ap ha +ak on +ðŁij « +snow don +ae w +raise the +ðĿ ĵ +grue some +clement ine +sp ing +lat a +worlden viron +mi mic +can aria +bakhtawar bz +ao a +fal a +ãĤ Ń +avi va +you uuu +thi gh +la dders +gu mbo +tz ky +fu zz +plastic pollution +est ate +strength ened +k ant +dr in +cal vert +transform ational +frigh tened +mac lean +elited angerous +ear thy +t son +to da +j nu +.. , +mic hal +i ban +je ong +is real +sim coe +exclu sives +blue bells +ben e +te u +pil sner +pens ke +athe ists +m pu +cartag ena +ðŁĴĹ ðŁĴĹ +million aires +kk kk +it ar +subscri ptions +remo te +ma fi +hin ton +w cc +ho k +ds b +ab leton +sevent y +pun ks +e indhoven +sh one +mcfar lane +lim popo +empha si +à ¼ +sin fo +pe tre +man grove +ch ino +ber tie +play lists +push awards +p af +deb bie +c do +r ino +ðŁı¾ âĢįâĻĤï¸ı +fol ke +bon nar +th ine +sl an +hal ter +evi e +aw some +vul tures +spar ky +seiz ures +âľ Ķ +ram one +ine ffe +al n +pro ctor +ast ra +the voice +gro te +sci on +dead line +am aya +tain ted +patter ned +exce eding +cross fit +kay lee +drop box +ru shes +tack led +mo by +retro gamer +n cbd +benef itting +shay kh +guild hall +gen try +dream cast +dread ed +bun dled +th aw +revol ving +n pt +kylie jenner +imagin ative +ron i +over came +family time +ds burg +car naval +relation ship +recogni zable +cor oner +ho le +fan fic +emir ates +bur ritos +analy se +thin ner +ne es +galli poli +bl r +cat woman +-- >> +au lt +ada ily +nau ghty +ili o +solit aire +mtv br +jocel yn +arun ach +rep ent +south gate +hy acin +essenti al +fent on +and um +it or +go pal +sl inger +po sei +aw il +wi elding +ra ila +eli as +a sto +à ¤ +tend ency +str ata +ker t +< - +im acele +da es +sti mulus +han ley +fit nes +ec stasy +lim ous +ha iling +ðŁ¤ Ń +chis wick +tar ies +sla v +pul i +moderni zation +black mail +b ingham +h fx ++ + +ðŁĩ®ðŁĩ ³ +ni v +we a +profess or +k off +bol ster +su ave +sequ ences +pepper oni +not te +dre n +ãģ¨ ç¹ĭãģ +hs v +o ga +ap tly +z ad +excel si +rin ka +mol dova +min n +ma bel +conferen cing +bas ing +of er +ob si +hamill himself +care less +brief ed +inhe rent +par ish +dub nation +town sville +sar awak +gee ky +doncaster isgreat +was abi +gu p +phen o +dra inthe +carrie underwood +ble eds +bbc world +ane w +alta f +dul wich +ani ston +w ti +sumat ra +gra fton +bl n +me ster +bode ga +re go +es q +an jo +sump 
tuous +mai sie +ï¿ ½ +wil t +jak ob +el vis +se pul +mu ster +air pollution +president e +happy monday +exten sively +fl ondon +t ls +play ing +pe ed +din ho +var dy +pi ka +n iro +au cus +ðŁį ¦ +nu ll +el ondon +juvent us +imag ines +dis ab +lit o +d ura +work places +promo te +mc caf +wood work +waw x +à® ª +tt ino +shar i +sem per +better together +ðŁijĬ ðŁı» +ze bra +pon dering +en chil +ho m +cosm ic +tan z +mo cked +ec cc +ath ed +abo lish +prop eller +paris agreement +assemb lies +indu stry +fraudul ent +pe sa +chang min +ax x +ðŁĴ µ +irr ational +cu sa +ramad han +octa via +on elove +jac ki +bar ak +taxi der +seri ous +nathan fillion +mc en +ch k +po part +grav ity +copp ola +reading fc +illu sions +j ig +ww x +re sh +ex porting +buzz ard +âĻ ¤ +p cm +lan apar +ko s +arom as +antal ya +ww dc +ven a +phil a +ball in +ðŁij Ħ +quin ta +ma o +f ery +eigh ty +sentim ents +safe guarding +r wa +pu ffs +luc ille +de cath +sl u +nu gent +de ter +braz il +ze iss +super bowl +subsi dy +alter n +hi dalgo +enz ymes +ä ½ +tag ne +hair dresser +adri en +walk out +oppo ses +can tina +bed side +af an +ðŁĶ Ĺ +prophe tic +dan es +un successful +super charged +pk k +exem ption +hart le +secu lar +cli pping +br s +united way +c net +pat chy +ha gan +e en +âļ ľ +var a +sym pathi +never trump +affir mation +om f +ny cfc +ma ja +sur ro +keer th +up scale +sandal wood +mon archy +kno bs +å ĭ +po tholes +hunger games +ter races +na sir +coun sell +welcome to +wa q +se aman +m ita +stun ningly +on theroad +in ability +) !! +bon go +ant v +sp ut +worldenviron mentday +resu sc +y td +fi m +eun hyuk +sa chin +rose anne +cler mont +ape c +am ina +v ening +n antes +al most +sin us +ex as +ty l +ti en +ple ad +lanc s +bur naby +re k +jo om +observ ers +disco graphy +cl g +âĻ ¦ +sn ack +r ti +o ily +crystal li +bru te +web development +topp ings +la f +an is +ad der +reli ving +car lin +battle of +we g +syri an +pon t +n dc +lagh ate +yu ma +sp p +p iti +ro bbing +mart ing +rey kja +raj put +nc ds +kie wicz +âĢ¢ âĢ¢ +vam pire +substan tially +opio ids +nepal i +k line +ar oo +under stand +lit t +u it +thro mbo +sar ies +qu ot +b alling +t tr +s gh +philip p +br ant +ac l +m ello +whit taker +. 
; +defi ant +b gc +repl ying +mir ren +metamor pho +sch wab +bul ge +utili zed +pick ering +par don +d sa +ภĪ +doo ley +cumul ative +Ð » +ur gency +e mir ++ /- +¦ Ī +ot as +âı ³ +station ed +grape vine +ar ac +karan johar +f ancy +sau l +coo gs +lgbt q +ا٠ħ +jav i +u mmer +pl l +den is +dai pur +pu ffin +lewi sham +fand om +co pe +ves matter +s ve +hel pless +deo dor +ostr ich +kaz an +friday the +con dor +v x +sophom ores +rob les +cu tt +cli mbers +ë¦ ¬ +sle g +sn f +mac ys +hydr ating +grou pe +po yn +mou lin +hg tv +lmfa ooo +sulph ur +asdfghj kl +annab elle +hump back +bra ved +viswas am +multi purpose +hu midi +escor ted +barb ican +f ad +cor sa +ðŁ¤ « +pi ppa +here to +can y +ser gi +or cas +o vie +ed ou +s any +glob alization +man cini +food truck +f is +defi brill +sch re +sma fia +love wins +la ut +k aka +hol lande +game on +resurg ence +out side +olympi ad +int an +abstr action +rapi d +pal om +cal le +jas min +attack ers +swag g +mit ra +ky lo +à® ² +her mitage +gor do +e ira +so sfam +roll out +exc ite +sy nod +mer rill +c als +as sa +liveli hoods +ju ve +the black +gopack go +ant lers +alban ian +wool ly +qu iche +puri fication +are th +smar thome +ne k +all blacks +mex icans +is m +ger ms +comple xion +mar ck +u shi +ðŁIJ IJ +char l +ca stic +till erson +giuli ani +biode gradable +mal bec +bo is +ju bil +im es +r ame +gene tic +esp nu +ch ley +so ho +go pher +g sc +buu ren +cu be +bridesma ids +webin ars +to e +mani pur +viol ently +notic ias +ex changing +chi ev +replac eable +muay thai +bu ss +sp il +instal ment +div ya +cait lin +o lim +fil tering +whirl wind +sta red +prior it +pr am +pompe ii +mono logue +k ite +bu ka +âĢ¦ .. +vac cine +bre ro +woz ni +sol ent +re ferr +my rt +gridi ron +galatasar ay +fro ze +clare mont +ðŁ¥ ĥ +victori as +ssel dorf +pa stures +net neutrality +ch or +ðŁij ģ +ಠ¿ +we ho +symp tom +jo sel +in ous +dragon con +power ball +p te +four thofjuly +ec la +ear buds +where abouts +salt life +depriv ation +ch ter +wi ggle +syste m +ps st +ch az +d any +ri mo +oax aca +lanapar rilla +barcel on +melanch oly +way back +ho tro +n si +l illy +kur o +ja han +intellec t +board game +ðŁı Ĭ +sneak peek +k prc +jail s +cand el +zan zi +mor timer +star ch +ra gs +p fa +long live +k art +gir ona +cro cker +christop h +precau tions +war ship +per m +paren t +van gogh +gif ford +allegh eny +ra yn +ut m +sten cil +rec alling +pen ney +z azzle +ìĥ Ŀ +hin ds +aren as +nu ev +law ler +gu in +do this +ðŁij ķ +ì¶ķ íķĺ +we g +ti b +ri din +complex es +turbul ent +pe sos +de marcus +vall arta +sam sun +kis ses +hein rich +deport es +wil ms +ur d +then ext +inki gayo +ho wi +fir sts +carri age +clean liness +mas war +is ch +ax el +si zzle +road house +fr ans +ent ourage +co bble +boo th +benedic t +tal on +fc u +year ofthe +ray on +raider nation +fo yle +ko val +pi anos +l pg +bur mese +man ure +geo caching +cosc ino +b np +fer ra +stro phy +mar ais +ce es +legen dof +kat niss +eno ch +av ed +you know +d prk +ðŁĺ¢ ðŁĺ¢ +sp un +pro st +sor rows +cent red +ke a +gal icia +? 
ðŁ¤Ķ +ÑĢод а +bou chard +ðŁĴĻ ðŁĴľ +yu i +seed lings +jon ah +reco vers +ny rd +board room +su ma +my japs +tun g +sha i +ir gc +eli o +wag ons +ka shi +polic emen +john nie +ale coscino +shop ify +dot ted +de tri +va w +to fficial +in your +chal mers +trac ed +no vi +by es +ari el +nipp on +la pel +gri ez +b gs +fool ing +d ita +vijay sethu +nm wx +as ot +kr anti +hel m +ve di +sic kest +mo chi +k abo +shru bs +he red +b sp +sq m +ham r +dul kar +anth a +nr f +avoid ance +at en +publi x +be arers +nas i +ha p +h ells +ðŁĸ ¥ +ภ· +thelast jedi +oh wx +ðŁį « +wa hoo +there se +rec aps +ss nhq +bird photography +v ay +pet ti +pau lo +bel vedere +( * +gr l +du vet +c pec +sa it +por sch +meas urable +avi ators +fre mantle +bre en +on om +me and +life saving +eu ref +en don +embar as +aira sia +el is +dun kin +star magic +s ill +porto bello +ki efer +ex e +mu ted +ãģ ¦ +we thepeople +logi a +liber al +theforce awakens +min ed +haun ts +freck les +care taker +s india +âķ IJ +dev lin +list on +direction er +oh n +fi garo +em manuel +du bois +cl ones +bru ise +ðŁİĪ ðŁİī +disin fe +der matology +as r +s watch +dis comfort +tam anna +pi day +mack en +k atic +delu sional +shaw nee +gu d +al bino +p ali +din gh +cucu mbers +coffe y +anticip ating +treas ured +web summit +shel tered +sav or +pedago gy +m gs +sh ma +s bu +den ali +cam pos +bubble gum +o ir +le aps +y ler +r one +sansk rit +min t +meat less +futuri st +du de +a vel +prote sted +squ ire +z aki +sz n +har court +cycl one +bour dain +gather ings +d ant +advent urer +parag on +alt man +dd ing +ban erjee +snorkel ing +mother well +mis sy +en der +glo ws +ki wis +chick pea +por o +e fron +app t +u y +speci fied +gab by +e strada +com bos +bour bon +vin i +var un +steph ani +key words +car vings +amit abh +wr ought +tw al +re els +clu bbing +ubi quit +cri t +ambed kar +æ Ļ +prun ing +vaccin ated +boe ing +s ks +lo ona +hypno sis +edel man +pho l +he w +colo sse +mckin sey +u on +to te +sacrific ing +ox i +n ang +e mu +пÑĢи ÑĢода +m th +kers wednesday +argu ed +timel apse +ris king +regul ating +ni gh +likeli hood +cu bic +au ction +rein for +pi stor +no ses +ye l +snu ggles +pe i +jean ette +ta ku +ri th +guy z +ภŀ +y te +ver ted +pay soff +jau regui +hoo ligans +procedu ral +mi b +har dy +el eng +chec kers +all ine +the met +prou dof +keerth yofficial +collabor ator +ni u +infl icted +adv ani +re twee +memor iam +f icial +ti ghter +sal em +re viewers +br ics +ben digo +am ell +tur kish +sush maswar +paul son +pal awan +mol lie +stitch er +s burgh +ir u +hay dn +en ers +aro a +u zzi +saraj evo +hel a +apol lo +nine ty +vac a +sp on +vent u +jel ena +hei fer +avo ids +sp ine +pri ze +mar ist +re creating +me de +woo den +find lay +ro fl +n di +compreh end +yu go +y ü +to work +u fos +son ar +pi ston +recor ding +tent ative +art forsale +pel lets +fre do +ÙĪ ر +mu ses +custom ization +pro found +is ner +ide ally +si am +plan kton +cm dr +man ger +fran ken +customiz able +ठ® +walk away +swi vel +vast ly +no ton +lex a +ex moor +z as +tan te +reduc tions +lol ly +hip sters +benef ited +ë ² +ww www +mascul ine +fi ji +dre y +ph ill +ane ous +nic ol +men dez +disapp ro +ch ner +through s +shen mue +east man +ðŁIJ İ +yu ck +under tale +re ys +go beavs +eng en +c na +mer r +bir k +ãģ¨ç¹ĭãģ ĮãĤĬãģŁãģĦ +âĥ£ @ +yn na +ste ed +offen der +at um +vani shing +presi denti +love them +g nocchi +fri ggin +per il +mad hya +ag ne +dee jay +mar nock +m tb +fold able +@ ___ +stand re +bron x +bow ski +fin ite +cro ckett +b sf +ge tit +seren awilliams +mir o +ignati 
us +sla y +rin se +fon due +sel dom +s more +gan i +dy ce +dmit ry +cru mb +late post +pri mark +oh ana +flor als +do a +remembrance day +d ds +azi one +toon ami +air port +æĿ ± +th ad +fi st +dine sh +dr who +ad words +admi rer +pro je +kyrgy z +à « +manife station +le wan +j ic +thi bau +le ased +van ity +nouri shed +never theless +aug mente +fu elled +che ad +wil shere +ru di +p z +my co +mor ro +herbali fe +hardro ck +de man +dre ality +sp ades +ce vic +bha i +bar on +ultimat efan +hou news +to bi +stru t +ke el +affili ation +the masters +sm al +hu e +este ban +con v +om nic +datab ases +co v +ter ti +st g +snoop dogg +metab ol +leth bridge +ðŁı» âĢįâĻĢï¸ı +year ling +residente vil +nws l +iy aki +griez mann +c ous +ðŁĵĿ : +tor ian +sam i +ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥ðŁĶ¥ +g are +alli ances +whit field +we ther +refin ing +coy i +kra ken +ðŁĺĺ âĿ¤ +singul arity +lil i +h ns +bol dand +waw rinka +misogy ny +lo vers +c q +b dg +ad ona +gar ter +women of +sc d +recogn ising +mun a +str ou +sign alling +lare do +hell boy +alek sand +un available +pedi atric +as in +mer ia +ri shi +futuri sm +w ye +polari zed +e we +pro pel +in forms +cre ase +~ " +arti ston +like for +heidel berg +er ra +life in +len ny +inter rupt +cohe rent +ca z +vick ers +le veled +f bs +cab ins +bu mmed +apost les +we h +ten don +souven irs +infu ri +pier ce +asse t +m las +go th +di ggin +ann as +yl or +th waite +sw el +pan era +mur derers +croo ked +bs go +ac u +a on +re an +one of +ko hl +bloo dh +pest icide +lost dog +fle xing +ëĤ ĺ +su pra +eter nally +ðŁļ Ļ +pa olo +ol an +mom o +is elle +captain marvel +s lou +mistak enly +akhi lesh +mer t +il inan +bu on +bal kan +mir ro +mill en +der ail +dam on +tit i +bi os +re don +pic ard +par te +ðŁ¤ Ł +Ø º +son ics +fir sth +dd c +veg ans +tur ban +ni gan +lot tie +lyn don +star buck +pink floyd +life styles +am ara +a she +r sc +val a +sm er +cw gc +cli ent +buen as +jag an +coo ps +ðŁijij ðŁijij +speci alizes +snag ged +g lar +ben net +wildlife wednesday +bow den +pi k +art in +empor ium +ar l +re ba +pas ser +disappo ints +additi ve +âľĬ ðŁı½ +bay er +missou la +ha skell +comm ences +ni x +ne man +explo ited +plastic surgery +cc d +aso cial +vo t +sie gel +fro ome +kap am +far a +e ha +pro bes +mw f +meet ing +p bb +ak ins +mistle toe +kingdom hearts +for kids +ec r +bal e +escor ts +adidas originals +k wa +k ts +hallo ffame +ðŁĺį . 
+wag s +pot ted +o wing +honey comb +he fty +uro logy +mer le +b pd +stri pping +re ich +k state +gu ay +yon ge +shak ti +g loom +bat t +son om +n ery +el ba +blan ks +hel le +triple ts +bom bay +ak arta +ab ia +transm itted +rol f +ja is +angular js +fi erc +m ss +trac e +ॠĩ +tom bs +old man +kom bucha +fo l +e health +cere als +are lli +in ari +ðŁĴ © +wo l +liber ties +fa wn +af firm +nun avut +hyster ical +k drama +art es +âĢ¢âĢ¢âĢ¢âĢ¢ âĢ¢âĢ¢âĢ¢âĢ¢ +valent in +man slaughter +gal es +eo in +energi zed +del s +with draws +st les +sar castic +ram esh +incredi bles +lock hart +ya wn +ultimatefan live +oooooooo oooooooo +mu en +guru dev +te er +pe eling +new snow +lingui stics +direc tv +ag end +uni lever +ru ger +han dedly +ero se +li mel +the c +royal ties +fini shers +nr g +m gt +fid get +com ps +bac on +aggre ssively +ab it +ch â +tar de +slu gger +q anda +gre ening +d ats +ensla ved +spec tor +o ye +fre ef +b hand +stop brexit +mis conceptions +cav a +ðŁĺįðŁĺįðŁĺįðŁĺį ðŁĺįðŁĺįðŁĺįðŁĺį +multit asking +hou sel +ferre ira +cen time +ank les +jo dh +hel ly +fro me +out tuesday +nar nia +bal aji +l bloggers +jyo ti +ðŁį ĩ +lan cia +cap ri +y ap +nat ash +down fall +." âĢĶ +à ® +ligam ent +coat ings +ai ded +hi ko +fall ing +encryp ted +yeg food +infringe ment +cu di +ce p +ðŁĺį ðŁĺĤ +tra d +super rugby +ed win +wh iche +vi meo +lay ne +in vigor +he he +dubrov nik +bie ber +u tr +sham an +op ers +ham ill +en ig +di f +ar um +scrap book +min h +diver gence +mckin non +life time +guter res +wil le +ple as +patt y +mic ron +k z +dom aine +ru sher +m ds +ches ney +screw driver +âģ© , +sle dge +hau er +chan a +stam ina +sprink ler +pl n +he ff +bol ton +om on +car rington +accor dion +jor ge +inter ception +in puts +gu ll +tran scription +vanu atu +it ical +eth os +tic h +spac ey +pee king +u mi +ha ger +psycho tic +illi an +illi a +bonnar oo +an ese +pu c +laghate parth +en hall +econom ical +dre dge +% - +u we +tu bular +scoun cil +pe asants +fl er +tumb ler +he p +ford ham +row ley +initi als +ev asion +er nation +plu gins +coch ran +c attle +acid ity +ðŁİĬ ðŁİī +re grann +jump man +ef ace +x ma +patri archy +esco bar +cristi an +tip ton +nu eva +hack ney +back seat +kill arney +aid an +sta dion +simul taneous +ida ho +a je +u th +figu re +clo s +bur k +volun tar +rec ite +macfar lane +cur few +bou do +w gn +sti x +sla p +scrat ched +philli p +jour ne +ex pelled +wa z +u ke +tati ana +ou e +ho pp +dimit ri +ðŁĵ £ +mato logist +electri fying +blu ffs +bill smafia +az cardinals +y aa +x mas +shar a +r ith +g ills +dre s +bar ton +authori zation +imperi alism +home of +to do +foot path +band width +visit spain +moh sin +erup ted +mi ki +insig nia +mike l +ss h +ger a +bank holiday +aw an +t weak +star craft +e al +construc tion +skelet ons +le ep +ine m +bar clay +ship wreck +monsi eur +yo h +ron t +form ative +ser o +le p +horse man +hoo sier +haz mat +cylin ders +cen ti +ðŁĴ¥ðŁĴ¥ ðŁĴ¥ +re em +na ire +mus ically +gras shopper +est onian +termin ology +ro main +blogger rt +tox in +stan ce +cultiv ated +an ast +ðŁIJ į +shi mano +go pher +ene i +recycla ble +gam ification +fight for +c q +avoc ados +ke ys +eli ke +gly cer +shak ur +mobili zation +gal ley +expla in +ex changed +pe th +obe dience +illa ge +en nis +ãĥ ŀ +wi v +walla bies +ma ar +ig ers +fin tech +fin alized +wo j +meaning less +in field +onna ise +e et +bron te +pass ages +ðŁij § +strick land +northern lights +lom ond +h tc +wr ay +shi fter +di alog +ðŁį į +>> >>>> +te atime +ste ch +sic huan +qu ill +fran ca +comple mentary +bar 
rington +marcu s +mal am +goo oo +for sa +elec tra +af s +âĹ Ĩ +tri fe +sn azzy +fo lia +and olan +after dark +wood son +stra de +litt lest +o gun +con wy +co wards +ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤ +íĬ ¸ +se ul +mur phy +dun ks +kapil shar +jo achim +wom ack +equal ity +aver ages +a ine +ðŁ¦ Ī +tac ular +dis ability +u ked +mid century +bar thol +teas ers +tab ern +nj caa +sp out +op i +ku bball +bl om +so ar +popu lism +meth yl +ðŁijĬ ðŁı¼ +o spre +alo ils +ðŁĵ ĸ +ðŁĮ ļ +x er +sp illing +publ ica +car dam +adi sh +sa cha +p kg +bu da +lyric ist +i bc +gru mp +ho ver +hal ep +anti body +anem one +âĻ¥âĻ¥ âĻ¥âĻ¥ +m cl +litho graph +cc u +s fest +path ic +calli ster +otta wa +gun sn +rut ger +hali but +en vision +differenti ate +ðŁļĢ ðŁļĢ +pir an +lat el +uc n +trou bad +ra ine +fierc ely +learn english +lea se +wex mondays +em it +dray ton +bur rell +scuba diving +hol ler +dr u +clo cked +w ral +ap ro +trans lucent +w bo +patri arch +mo ja +lan nister +fish ery +ne derland +mil dly +mi rai +ma ko +ja p +ðŁĺ©ðŁĺ© ðŁĺ© +pro statec +p anna +ar ama +under taking +tomp kins +ne op +soli ds +sav oury +e ames +cut lery +wood bridge +steam er +ri zzo +wild cat +rat na +lamin ated +kin eni +jal ap +ai des +acknowle dges +?! ?!?! +! ðŁİī +w afc +mag gio +ha ves +dar je +of i +gr il +v asi +bru x +mo hd +fake speare +arn old +r mb +for be +wal leye +ro di +therapeu tics +strate gi +ob ste +mu dder +download able +dd ings +d ca +asi angames +campe on +appropri ation +th century +ram atta +dra ped +bul lion +mu c +one x +se greg +ophel ia +bod ily +âĿ¤ ðŁĺį +wi zar +te ased +ade my +to id +sur a +lazar us +sn ickers +ma se +lo h +bow ed +bibli o +x change +har lan +gho shal +flavor ful +bha gat +alle z +whiche ver +ten stein +disc er +organ iser +mt g +dream liner +t se +hok kaido +mo k +indulg ent +hick man +blin ded +al yn +aaa ah +sp ool +lough borough +inter pret +et v +aristo tle +optimi zing +avici i +madu rai +ju li +naw az +mat chups +ab ide +paint ing +w elling +vel i +octag on +in scribed +po king +plac er +life cycle +kili g +g sp +eli ves +cle ments +na sheed +me sut +incarcer ated +dist illed +wal ang +delic acy +del gado +che z +ch ita +ad ero +tu x +pati l +o do +abh cosmetics +tv c +p bc +in accurate +hardwork paysoff +ball er +quot ation +merchandi sing +ga stri +defen ses +dro gba +bex hill +ban kno +win ona +si eg +p gs +hahah ha +agu chi +su bram +mirac le +de sch +li bre +ba cher +ent ine +bbcra di +lou dest +r ps +pi erc +fr yer +storm trooper +rafael nadal +pas co +exhau stion +epic onetsy +rc tid +kel lie +ga ines +d bz +sm riti +s bridge +lim ited +cla w +technic al +bio graphical +ado red +ภ° +exclu de +ac adia +key boards +fur man +so ca +sur u +ni ps +sw aps +server less +run e +pu ffy +north ampton +nish ings +hen der +cartri dges +gun shot +ðŁĵ ¹ +fil ament +respon dents +pey ton +mountaine er +mer ging +life span +intimid ation +p afc +nl wx +expan sive +pur r +f ck +ca e +at ti +tele thon +so hn +mend el +lo pes +dor i +un broken +te red +tast ings +in active +disin tegr +t assel +share the +pi ano +is lay +air space +z awa +ricci ardo +ming ton +fresh er +cur ry +re vs +pharo ah +h mv +exhilar ating +wh oo +lin kin +kri spy +competen cy +ste wards +ne bu +kat su +ad mins +baz ar +as ar +giving back +s summit +song z +lin us +raj kumar +farm ington +fanta sia +ðŁĺ´ ðŁĺ´ +so bri +lis se +barry more +pri sm +blo b +sen ew +mono xide +exp ire +eigh teen +di pper +xi ao +kil t +hin ch +bbc sport +bam boo +p ter +ex al +ðŁ¦ ĭ +ham lin +expe ditions +star gazing +food security +wy 
lie +ul f +st ingly +on storm +lo eb +bro ome +bn ha +pancre atic +eli ve +!!!!!!!! !!! +ther apper +ortho pedic +avengers endgame +antit rust +ìļ ° +go te +om d +off side +gy llen +win eries +white water +ad l +lu pita +exce eds +consi sted +chew bacca +ash leigh +nhl jets +is san +sh ld +hay at +cran berries +ðŁ¤ĺ ðŁı½ +rock the +spring training +fall out +dairy free +wa j +un decided +so wn +rc n +north wales +htt r +fu mble +d its +comp elled +popu list +min ted +blan chett +. '' +pro pulsion +m illa +au berg +her tz +h ta +u daipur +serendip ity +azte cs +als ace +ðŁIJ ij +lu n +sho es +char li +gar za +ðŁĴ Ł +pro biotics +fox tv +ol is +mi ff +loc alized +diffu ser +si gue +fun ko +rend ous +ðŁĴ ij +jeky ll diff --git a/ldm_patched/modules/sd1_tokenizer/special_tokens_map.json b/ldm_patched/modules/sd1_tokenizer/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..2c2130b544c0c5a72d5d00da071ba130a9800fb2 --- /dev/null +++ b/ldm_patched/modules/sd1_tokenizer/special_tokens_map.json @@ -0,0 +1,24 @@ +{ + "bos_token": { + "content": "<|startoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false + }, + "eos_token": { + "content": "<|endoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false + }, + "pad_token": "<|endoftext|>", + "unk_token": { + "content": "<|endoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false + } +} diff --git a/ldm_patched/modules/sd1_tokenizer/tokenizer_config.json b/ldm_patched/modules/sd1_tokenizer/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5ba7bf706515bc60487ad0e1816b4929b82542d6 --- /dev/null +++ b/ldm_patched/modules/sd1_tokenizer/tokenizer_config.json @@ -0,0 +1,34 @@ +{ + "add_prefix_space": false, + "bos_token": { + "__type": "AddedToken", + "content": "<|startoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false + }, + "do_lower_case": true, + "eos_token": { + "__type": "AddedToken", + "content": "<|endoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false + }, + "errors": "replace", + "model_max_length": 77, + "name_or_path": "openai/clip-vit-large-patch14", + "pad_token": "<|endoftext|>", + "special_tokens_map_file": "./special_tokens_map.json", + "tokenizer_class": "CLIPTokenizer", + "unk_token": { + "__type": "AddedToken", + "content": "<|endoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false + } +} diff --git a/ldm_patched/modules/sd1_tokenizer/vocab.json b/ldm_patched/modules/sd1_tokenizer/vocab.json new file mode 100644 index 0000000000000000000000000000000000000000..469be27c5c010538f845f518c4f5e8574c78f7c8 --- /dev/null +++ b/ldm_patched/modules/sd1_tokenizer/vocab.json @@ -0,0 +1,49410 @@ +{ + "!": 0, + "!!": 1443, + "!!!": 11194, + "!!!!": 4003, + "!!!!!!!!": 11281, + "!!!!!!!!!!!!!!!!": 30146, + "!!!!!!!!!!!": 49339, + "!!!!!!!!!!": 35579, + "!!!!!!!!!": 28560, + "!!!!!!!!": 21622, + "!!!!!!!": 15203, + "!!!!!!": 9168, + "!!!!!": 5203, + "!!!!": 2360, + "!!!\"": 28048, + "!!!)": 42532, + "!!!": 995, + "!!\"": 20556, + "!!#": 34997, + "!!)": 28352, + "!!": 748, + "!!@": 40705, + "!\"": 2947, + "!\"@": 43819, + "!#": 9670, + "!'": 13222, + "!),": 37904, + "!).": 26225, + "!)": 4571, + "!*": 37737, + "!,": 29325, + "!-": 43499, + "!...": 22121, + "!..": 35475, + "!.": 22517, + "!:)": 31671, + "!:": 17545, + "!": 256, + "!?!": 
29767, + "!?!?": 47081, + "!?": 6004, + "!@": 15117, + "!]": 34466, + "!âĢ¦": 35068, + "!âĿ¤ï¸ı": 32559, + "!ðŁİī": 49085, + "!ðŁĺĬ": 43434, + "!ðŁĺį": 36438, + "\"": 1, + "\"!": 10377, + "\"\"": 41530, + "\"\"\"": 25539, + "\"\"": 8575, + "\"#": 8345, + "\"'": 31065, + "\"(": 32741, + "\")": 13112, + "\",": 4332, + "\"-": 9375, + "\"....": 37785, + "\"...": 9049, + "\"..": 25403, + "\".": 2811, + "\"/": 39486, + "\":": 7811, + "\";": 37549, + "\"": 257, + "\"?": 11727, + "\"@": 1512, + "\"@_": 20236, + "\"[": 36930, + "\"âĢ¦": 33993, + "\"âĢĶ": 41151, + "#": 2, + "##": 15483, + "#...": 31491, + "#:": 30144, + "#": 258, + "#@": 35062, + "#âĢ¦": 12834, + "#âĢİ": 34262, + "$": 3, + "$$": 24233, + "$$$": 31859, + "$$": 14929, + "$)": 39460, + "$.": 34682, + "$": 259, + "%": 4, + "%!": 35070, + "%),": 37819, + "%)": 16063, + "%,": 14505, + "%-": 48784, + "%.": 12475, + "%;": 33379, + "%": 260, + "&": 5, + "&&": 27791, + "&": 261, + "'": 6, + "'!": 13781, + "'\"": 19479, + "'#": 15319, + "''": 46594, + "''": 8445, + "')": 19175, + "',": 5662, + "'-": 26152, + "'...": 20474, + "'.": 4645, + "':": 7182, + "';": 44517, + "'": 262, + "'?": 17242, + "'@": 26397, + "'d": 1896, + "'ll": 1342, + "'m": 880, + "'re": 982, + "'s": 568, + "'t": 713, + "'ve": 1200, + "'âĢ¦": 42120, + "(": 7, + "(!)": 30253, + "(\"": 18741, + "(#": 6229, + "($)": 46597, + "($": 15186, + "(&": 15042, + "('": 18235, + "((": 22944, + "(((": 33287, + "((": 13796, + "().": 41737, + "()": 8475, + "(*": 48004, + "(*": 39575, + "(+": 12903, + "(-": 20228, + "(...": 45159, + "(.": 43055, + "(:": 8528, + "(;": 23983, + "(": 263, + "(?)": 22885, + "(@": 2181, + "(£": 33987, + "(©": 44886, + "(ðŁĵ·:": 34610, + "(ðŁĵ·": 37999, + "(ðŁĵ¸:": 44422, + "(ðŁĵ¸": 45204, + ")": 8, + ")!!": 47518, + ")!": 7805, + ")\"": 13046, + ")#": 39981, + ")'": 23613, + ")(": 27956, + "))": 13720, + "))))": 42911, + "))))": 34181, + ")))": 18305, + "))": 5167, + "),": 2361, + ")-": 19034, + ")...": 15274, + ")..": 41822, + ").": 1818, + ")/": 26616, + "):": 4143, + ");": 19686, + ")": 264, + ")?": 18765, + ")@": 41928, + ")_/": 45028, + ")_/¯": 45781, + ")âĢ¦": 41844, + "*": 9, + "*)": 30956, + "**": 9825, + "****": 21326, + "********": 42974, + "*****": 43571, + "****": 25167, + "***": 7829, + "**": 4441, + "*,": 41895, + "*-*": 23568, + "*.": 31304, + "*": 265, + "*_*": 44535, + "+": 10, + "+)": 34810, + "++": 47298, + "+++": 35986, + "++": 19056, + "+,": 35885, + "+.": 25238, + "+/-": 47614, + "+": 266, + ",": 11, + ",\"": 3823, + ",#": 11215, + ",&": 26905, + ",'": 10599, + ",)": 44493, + ",,": 21340, + ",,,,": 33225, + ",,,": 14811, + ",,": 8844, + ",-": 29821, + ",...": 20365, + ",.": 41277, + ",": 267, + ",@": 13975, + ",âĢ¦": 14601, + "-": 12, + "-\"": 18646, + "-#": 10151, + "-$": 24946, + "-'": 28010, + "-(": 33345, + "-)": 3535, + "-*": 21527, + "--": 2154, + "----": 5753, + "--------": 11772, + "----------------": 23122, + "----": 30164, + "---->": 35999, + "---": 11079, + "--->": 14518, + "--": 2432, + "-->": 6422, + "-->>": 47252, + "-.-": 32765, + "-...": 43147, + "-.": 44040, + "-": 268, + "->": 5081, + "-@": 10087, + "-_-": 27227, + "-__": 42718, + "-âĢ¦": 30047, + ".": 13, + ".!!": 37805, + ".!": 14030, + ".\"": 18650, + ".\"-": 21234, + ".\"": 1081, + ".\"âĢĶ": 48703, + ".#": 5014, + ".'\"": 41558, + ".''": 49379, + ".'": 5938, + ".(": 22294, + ".)": 5376, + ".*": 26145, + ".,": 5276, + ".-": 12481, + "..": 608, + "..!!": 23707, + "..!": 17994, + "..\"": 15229, + "..#": 15735, + "..,": 47143, + "...": 3002, + "...!!!": 38351, + "...!!": 
39915, + "...!": 16860, + "...\"": 5240, + "...#": 8195, + "...&": 44979, + "...'": 23167, + "...(": 37981, + "...)": 14040, + "...,": 42717, + "....": 2386, + "....\"": 26689, + "....#": 20346, + ".....": 34151, + ".....#": 38867, + "........": 8246, + "................": 24855, + "............": 42965, + "...........": 35008, + "..........": 25526, + ".........": 19881, + "........": 14720, + ".......": 9917, + "......": 5590, + ".....": 3104, + "....": 1390, + "....@": 29790, + "...:": 34570, + "...": 678, + "...?": 16388, + "...@": 12672, + "..": 852, + "..?": 23875, + "..@": 21124, + "./": 31975, + ".:": 15811, + ".;": 47596, + ".": 269, + ".<": 29442, + ".?": 29294, + ".@": 1230, + ".]": 33511, + ".~": 42651, + ".âĢ¦": 18047, + ".âĿ¤ï¸ı": 39085, + ".âłĢ": 30097, + ".ðŁĺĤ": 46580, + "/": 14, + "/#": 13217, + "/$": 36266, + "/-": 19811, + "/.": 39382, + "//": 15348, + "////": 46271, + "///": 22734, + "//": 3502, + "/": 270, + "/@": 8216, + "0": 15, + "0": 271, + "1": 16, + "1": 272, + "2": 17, + "2": 273, + "3": 18, + "3": 274, + "4": 19, + "4": 275, + "5": 20, + "5": 276, + "6": 21, + "6": 277, + "7": 22, + "7": 278, + "8": 23, + "8": 279, + "9": 24, + "9": 280, + ":": 25, + ":\"": 29498, + ":\")": 46432, + ":\"": 12089, + ":#": 26625, + ":$": 33769, + ":'": 8017, + ":'(": 21250, + ":')": 10701, + ":'": 23851, + ":((": 42496, + ":(": 5965, + ":)": 11070, + ":))))": 42339, + ":)))": 21840, + ":))": 10164, + ":).": 39010, + ":)": 1408, + ":*": 12617, + ":-": 13021, + ":-(": 25137, + ":-)": 4223, + ":-": 10323, + ":...": 42140, + "://": 12441, + ":/": 13604, + "::": 33077, + ":::": 43818, + "::": 9788, + ":": 281, + ":>": 39677, + ":@": 14339, + ":]": 43486, + ":|": 45986, + ":âĢ¦": 22365, + ";": 26, + ";))": 41873, + ";)": 3661, + ";-": 35657, + ";-)": 10475, + ";;": 34824, + ";;": 24492, + ";": 282, + "<": 27, + "<-": 47280, + "": 34308, + "<<": 24588, + "<": 283, + "<<": 16482, + "<<<": 35054, + "<|endoftext|>": 49407, + "<|startoftext|>": 49406, + "=": 28, + "=))": 39587, + "=)": 17840, + "=": 284, + "==": 11748, + "====": 21734, + "========": 38952, + "==>": 29688, + "=>": 9714, + ">": 29, + ">.<": 38507, + ">:": 36196, + ">": 285, + "><": 28015, + ">>": 8270, + ">>": 2988, + ">>>": 6395, + ">>>>": 18461, + ">>>>": 18435, + ">>>>>": 32972, + ">>>>>>": 48947, + ">>>>>>>>": 41947, + ">_": 44144, + "?": 30, + "?!": 9785, + "?!!": 25342, + "?!\"": 29315, + "?!": 2835, + "?!?!": 16349, + "?!?!?!": 49084, + "?!?!?": 37619, + "?!?": 11395, + "?\"": 3283, + "?#": 24018, + "?'": 13610, + "?)": 9626, + "?,": 41628, + "?...": 22641, + "?..": 43905, + "?.": 41251, + "?:": 21067, + "?": 286, + "??": 5195, + "??!!": 43219, + "??!": 37341, + "??\"": 44996, + "??": 2197, + "???": 40017, + "???": 3824, + "????": 15936, + "????": 10362, + "?????": 21370, + "??????": 34589, + "????????": 45091, + "?@": 29258, + "?ðŁ¤Ķ": 47928, + "@": 31, + "@#": 39397, + "@.": 43730, + "@/": 28639, + "@": 287, + "@@": 30314, + "@_": 2692, + "@__": 17042, + "@___": 48308, + "A": 32, + "A": 288, + "B": 33, + "B": 289, + "C": 34, + "C": 290, + "D": 35, + "D": 291, + "E": 36, + "E": 292, + "F": 37, + "F": 293, + "G": 38, + "G": 294, + "H": 39, + "H": 295, + "I": 40, + "I": 296, + "J": 41, + "J": 297, + "K": 42, + "K": 298, + "L": 43, + "L": 299, + "M": 44, + "M": 300, + "N": 45, + "N": 301, + "O": 46, + "O": 302, + "P": 47, + "P": 303, + "Q": 48, + "Q": 304, + "R": 49, + "R": 305, + "S": 50, + "S": 306, + "T": 51, + "T": 307, + "U": 52, + "U": 308, + "V": 53, + "V": 309, + "W": 54, + "W": 310, + "X": 55, + "X": 311, + 
"Y": 56, + "Y": 312, + "Z": 57, + "Z": 313, + "[": 58, + "[#": 11115, + "[...": 39975, + "[...]": 43790, + "[": 314, + "[@": 15148, + "[]": 22240, + "\\": 59, + "\\'": 41239, + "\\": 315, + "]": 60, + "]\"": 39434, + "],": 34067, + "].": 26262, + "]:": 21641, + "]": 316, + "][#": 39009, + "][": 29329, + "^": 61, + "^)": 30720, + "^-": 43516, + "^.": 31552, + "^.^": 35791, + "^": 317, + "^^": 34454, + "^^": 9064, + "^_": 14423, + "^_^": 15995, + "_": 62, + "_'": 44701, + "_(": 36951, + "_)": 37393, + "_*": 36237, + "_,": 31417, + "_-": 23193, + "_.": 26841, + "_/": 37647, + "_:": 13109, + "_": 318, + "__": 2355, + "__:": 47043, + "__": 3838, + "___": 43812, + "___": 13530, + "____": 4727, + "____": 25350, + "_____": 38803, + "________": 9549, + "________________": 20115, + "`": 63, + "`": 319, + "a": 64, + "a": 320, + "aa": 1821, + "aa": 3894, + "aaa": 14376, + "aaa": 9583, + "aaaa": 6727, + "aaaa": 19336, + "aaaaa": 31095, + "aaaaaa": 44413, + "aaaaaaaa": 23126, + "aaaah": 49151, + "aaah": 35856, + "aaay": 37846, + "aab": 34108, + "aac": 23251, + "aac": 11346, + "aad": 20464, + "aad": 35894, + "aaf": 37638, + "aaf": 31534, + "aag": 42174, + "aah": 28990, + "aaj": 28727, + "aaj": 43411, + "aak": 37739, + "aal": 22268, + "aal": 30208, + "aali": 27896, + "aaliyah": 46577, + "aam": 12943, + "aam": 22775, + "aama": 45018, + "aamaadmi": 45563, + "aamaadmiparty": 46406, + "aamir": 27456, + "aan": 20705, + "aan": 13426, + "aand": 38054, + "aap": 12023, + "aap": 12052, + "aapl": 34516, + "aar": 4695, + "aar": 13234, + "aard": 46932, + "aaron": 13948, + "aaron": 7709, + "aas": 28542, + "aas": 32205, + "aat": 34018, + "aat": 35004, + "aau": 35426, + "aay": 38281, + "aay": 40249, + "aaz": 26770, + "ab": 596, + "ab": 3937, + "aba": 44204, + "aba": 11102, + "abad": 33444, + "abad": 7155, + "aban": 41662, + "aband": 8595, + "abandon": 28805, + "abandoned": 11227, + "abar": 17860, + "abar": 39805, + "abas": 25402, + "abay": 43542, + "abb": 38954, + "abb": 38297, + "abba": 30870, + "abbas": 37494, + "abbas": 24412, + "abbey": 31927, + "abbey": 10132, + "abbie": 39949, + "abbo": 13536, + "abbot": 44046, + "abbott": 43737, + "abbott": 15649, + "abbrevi": 44843, + "abby": 30586, + "abby": 14694, + "abc": 13137, + "abc": 5334, + "abcnews": 31566, + "abd": 44093, + "abdel": 46511, + "abdomin": 35335, + "abdominal": 39328, + "abdu": 13361, + "abduc": 17884, + "abducted": 31520, + "abduction": 36984, + "abdul": 14227, + "abdul": 15593, + "abdullah": 21317, + "abe": 15856, + "abe": 12734, + "abee": 36037, + "abel": 31938, + "abel": 25318, + "abella": 46156, + "aben": 40865, + "aber": 7828, + "aber": 41867, + "aberdeen": 30539, + "aberdeen": 17236, + "abh": 27484, + "abh": 33649, + "abhcosmetics": 49189, + "abhi": 18113, + "abhin": 44045, + "abhishek": 44502, + "abi": 16867, + "abi": 14161, + "abia": 48604, + "abide": 49163, + "abig": 20863, + "abigail": 25686, + "abil": 21135, + "abilities": 8724, + "ability": 35146, + "ability": 3024, + "abit": 48668, + "ablanc": 33716, + "able": 10102, + "able": 863, + "abled": 10655, + "ableg": 24055, + "ables": 8486, + "ableton": 47169, + "ably": 6748, + "abnormal": 40934, + "abo": 2889, + "abo": 21861, + "aboard": 11661, + "abol": 31768, + "abolic": 46827, + "abolish": 47403, + "aboo": 42433, + "abor": 8416, + "aboriginal": 20422, + "abortion": 12336, + "abortions": 43218, + "aboss": 46401, + "abou": 36455, + "abou": 44053, + "abound": 41037, + "abour": 46637, + "about": 20204, + "about": 781, + "abouts": 36339, + "above": 35019, + "above": 4348, + "aboy": 37077, + "abpoli": 
44779, + "abq": 38767, + "abr": 44932, + "abra": 10694, + "abra": 35087, + "abraham": 40623, + "abraham": 15869, + "abram": 33255, + "abrams": 29852, + "abre": 22472, + "abre": 46756, + "abri": 28605, + "abridged": 45333, + "abroad": 11253, + "abru": 46295, + "abs": 18431, + "abs": 11109, + "absc": 25389, + "abscbn": 44260, + "abscbn": 45810, + "absen": 32453, + "absence": 19240, + "absent": 30363, + "absol": 4624, + "absolu": 7055, + "absolut": 4666, + "absolute": 7501, + "absolutely": 4703, + "absor": 14303, + "absorb": 35806, + "absorbed": 45059, + "absorbing": 46412, + "absorption": 42210, + "abstr": 7530, + "abstract": 23885, + "abstract": 10197, + "abstractart": 31170, + "abstraction": 47696, + "abstracts": 40065, + "absur": 21639, + "absurd": 29757, + "abt": 9850, + "abu": 9167, + "abu": 11787, + "abud": 20180, + "abudha": 21450, + "abudhabi": 25256, + "abuja": 23371, + "abun": 20544, + "abundance": 23236, + "abundant": 31611, + "abur": 23377, + "aburger": 46660, + "abuse": 7678, + "abused": 23855, + "abuses": 37132, + "abusing": 36558, + "abusive": 26858, + "abv": 34172, + "aby": 16342, + "aby": 31378, + "abyss": 33632, + "abz": 42292, + "ac": 546, + "ac": 2816, + "aca": 9213, + "acab": 41388, + "acacia": 44047, + "acad": 32537, + "acade": 2892, + "academia": 22662, + "academic": 31178, + "academic": 7935, + "academics": 26417, + "academies": 42569, + "academy": 29968, + "academy": 4041, + "acadi": 41455, + "acadia": 49236, + "acam": 26172, + "acan": 42227, + "acan": 26318, + "acap": 32357, + "acar": 22232, + "acare": 16961, + "acc": 26805, + "acc": 9318, + "acca": 30883, + "acce": 8564, + "acceler": 10161, + "accelerate": 23619, + "accelerated": 38513, + "accelerating": 41821, + "acceleration": 39387, + "accelerator": 25261, + "accent": 28110, + "accent": 18931, + "accents": 31738, + "accenture": 41853, + "accep": 4616, + "accept": 16447, + "accept": 9338, + "acceptable": 14209, + "acceptance": 17090, + "accepted": 9159, + "accepting": 12855, + "accepts": 22338, + "access": 7596, + "access": 3822, + "accessi": 10787, + "accessibility": 23407, + "accessible": 13977, + "accessing": 46339, + "accessories": 10220, + "accessory": 20417, + "acci": 4263, + "acci": 33943, + "accident": 6608, + "accidental": 24895, + "accidentally": 11061, + "accidents": 22072, + "acclaimed": 21172, + "acco": 44730, + "accol": 33858, + "accolades": 46731, + "accom": 23658, + "accommo": 34495, + "accommod": 14386, + "accommodate": 34708, + "accommodation": 18066, + "accommodations": 45536, + "accomp": 24985, + "accompan": 14746, + "accompanied": 20715, + "accompany": 34142, + "accompanying": 38179, + "accompli": 10205, + "accomplish": 25542, + "accomplished": 16462, + "accomplishment": 26100, + "accomplishments": 24965, + "accor": 4182, + "accord": 34293, + "accord": 28513, + "according": 4717, + "accordingly": 35535, + "accordion": 48760, + "accoun": 3081, + "account": 18424, + "account": 4684, + "accountability": 19377, + "accountable": 24216, + "accountant": 31026, + "accountants": 37222, + "accounted": 43951, + "accounting": 14805, + "accounts": 9974, + "accra": 31900, + "accred": 17451, + "accreditation": 27015, + "accredited": 27647, + "acct": 45569, + "accu": 5618, + "accumul": 19275, + "accumulation": 37112, + "accur": 6551, + "accuracy": 18423, + "accurate": 8858, + "accurately": 24206, + "accusations": 33615, + "accuse": 39414, + "accused": 9434, + "accuses": 27496, + "accusing": 41474, + "acdc": 45067, + "ace": 2675, + "ace": 804, + "acea": 35219, + "aceae": 38153, + "acele": 40868, + "aceous": 
33610, + "acer": 37990, + "acer": 25809, + "aces": 5725, + "acet": 28735, + "acf": 38389, + "ach": 972, + "ach": 987, + "acha": 22686, + "acharya": 45780, + "achat": 32706, + "ache": 27771, + "ache": 7214, + "ached": 17048, + "acher": 38442, + "acher": 17936, + "achers": 25051, + "aches": 14823, + "achi": 3264, + "achi": 9087, + "achiev": 8160, + "achieve": 14798, + "achieve": 8175, + "achieved": 12359, + "achievement": 8245, + "achievements": 16114, + "achiever": 46286, + "achievers": 44544, + "achieves": 40123, + "achieving": 16120, + "achilles": 33327, + "achim": 42335, + "aching": 12864, + "acho": 33130, + "achs": 41195, + "aci": 4359, + "aci": 34100, + "acia": 30163, + "acial": 32422, + "acid": 35474, + "acid": 10085, + "acidity": 48800, + "acids": 27751, + "acies": 20162, + "acin": 39442, + "acing": 9442, + "acio": 26202, + "acion": 44965, + "acion": 24968, + "acional": 26435, + "aciones": 35832, + "acious": 16020, + "acity": 7511, + "ación": 38175, + "ack": 877, + "ack": 725, + "acked": 5698, + "acker": 31201, + "acker": 7940, + "ackeray": 41843, + "acki": 42857, + "acking": 5515, + "ackles": 28503, + "acknow": 13563, + "acknowle": 18100, + "acknowledge": 25209, + "acknowledged": 35913, + "acknowledges": 49083, + "acknowledging": 45645, + "acks": 3858, + "acl": 47593, + "acl": 23073, + "acle": 6504, + "acles": 34164, + "aclu": 37354, + "acm": 39317, + "acmilan": 36500, + "acne": 24195, + "aco": 9463, + "aco": 8800, + "acol": 17431, + "acollege": 43468, + "acom": 17224, + "acom": 22342, + "acon": 11621, + "acon": 11571, + "aconf": 38851, + "acons": 31599, + "acor": 22076, + "acorn": 37537, + "acos": 39943, + "acosta": 31994, + "acou": 8794, + "acoun": 31295, + "acounty": 45449, + "acoustic": 10616, + "acoustics": 43873, + "acp": 19627, + "acqu": 7946, + "acquainted": 40713, + "acqui": 12194, + "acquire": 21576, + "acquired": 15932, + "acquires": 27376, + "acquiring": 42785, + "acquis": 14207, + "acquisition": 16543, + "acquisitions": 39649, + "acr": 43648, + "acre": 26749, + "acre": 9493, + "acres": 11630, + "acro": 21060, + "acrob": 40891, + "acron": 37770, + "across": 2500, + "acrosse": 40979, + "acruz": 40455, + "acry": 10440, + "acrylic": 12252, + "acs": 11782, + "act": 10305, + "act": 1393, + "acted": 10971, + "acti": 4786, + "acting": 6319, + "action": 12493, + "action": 1816, + "actions": 6271, + "activ": 3430, + "activate": 26737, + "activated": 22249, + "activation": 26769, + "active": 19009, + "active": 4046, + "actively": 18645, + "activi": 7230, + "activism": 20117, + "activist": 10850, + "activists": 12649, + "activities": 6514, + "activity": 6206, + "actment": 44807, + "acton": 36167, + "acton": 36697, + "actonclimate": 43797, + "actor": 12181, + "actor": 4035, + "actors": 9255, + "actorslife": 25117, + "actorvijay": 34033, + "actress": 5805, + "actresses": 33639, + "acts": 6816, + "actu": 2375, + "actual": 7488, + "actually": 2955, + "acu": 9204, + "acu": 48475, + "aculture": 38145, + "acup": 30869, + "acup": 27278, + "acupuncture": 40043, + "acur": 44719, + "acura": 30120, + "acus": 33710, + "acute": 19734, + "acy": 18717, + "acy": 2356, + "ad": 594, + "ad": 680, + "ada": 25785, + "ada": 1886, + "adaily": 47254, + "adal": 46646, + "adam": 6037, + "adam": 4944, + "adamlambert": 27659, + "adams": 7942, + "adan": 41802, + "adani": 37499, + "adap": 6341, + "adapt": 22666, + "adaptation": 16566, + "adapted": 26657, + "adapter": 21839, + "adapting": 44120, + "adaptive": 28672, + "adar": 27702, + "adar": 32681, + "adas": 23250, + "adata": 39500, + "aday": 31367, + "aday": 
10280, + "adays": 24337, + "adb": 45630, + "adc": 38201, + "add": 19408, + "add": 3536, + "addams": 38912, + "added": 4149, + "adder": 47557, + "addi": 36378, + "addic": 5709, + "addict": 14614, + "addicted": 16275, + "addiction": 11751, + "addictive": 29638, + "addicts": 29997, + "adding": 8676, + "addis": 43911, + "addison": 32369, + "additi": 26927, + "addition": 6698, + "additional": 10666, + "additions": 22575, + "additive": 48546, + "addo": 40001, + "address": 5834, + "addressed": 20817, + "addresses": 12702, + "addressing": 10594, + "adds": 9944, + "addy": 24746, + "ade": 2194, + "ade": 1928, + "adecides": 46374, + "aded": 9994, + "adee": 47054, + "adel": 4434, + "adel": 27308, + "adelaide": 38193, + "adelaide": 11611, + "adele": 42843, + "adele": 21220, + "adelrey": 43627, + "ademy": 49123, + "aden": 28669, + "aden": 28688, + "adena": 23648, + "adequ": 18232, + "adequate": 22281, + "ader": 21365, + "adero": 49185, + "aders": 27672, + "ades": 5793, + "adh": 42301, + "adhd": 32649, + "adhe": 21175, + "adhesive": 38429, + "adi": 2486, + "adi": 8779, + "adia": 26874, + "adic": 36780, + "adid": 8086, + "adidas": 22396, + "adidas": 9589, + "adidasoriginals": 48575, + "adies": 45834, + "adifference": 37217, + "adilla": 41167, + "ading": 15000, + "adio": 15060, + "adirond": 36843, + "adish": 49009, + "adity": 28596, + "aditya": 37186, + "adityanath": 44437, + "adjac": 32517, + "adjacent": 33836, + "adjec": 45512, + "adju": 16413, + "adjun": 45995, + "adjust": 13784, + "adjust": 28073, + "adjustable": 20476, + "adjusted": 30515, + "adjusting": 41132, + "adjustment": 36081, + "adjustments": 36331, + "adl": 49351, + "adler": 30222, + "adm": 9892, + "adm": 33604, + "admi": 11666, + "admin": 12528, + "admini": 6434, + "administr": 12174, + "administration": 9502, + "administrative": 22424, + "administrator": 22603, + "administrators": 36123, + "admins": 49297, + "admir": 17031, + "admiral": 21013, + "admiration": 39569, + "admire": 17791, + "admired": 36103, + "admirer": 48344, + "admiring": 29835, + "admission": 11315, + "admissions": 22463, + "admit": 13769, + "admits": 16332, + "admitted": 20427, + "admitting": 46148, + "adn": 40339, + "adnan": 42037, + "ado": 4775, + "ado": 2933, + "adobe": 29256, + "adobe": 16484, + "adog": 44913, + "adol": 33512, + "adole": 22704, + "adolescent": 36793, + "adolescents": 45656, + "adolf": 41179, + "adon": 25907, + "adona": 48419, + "adop": 4183, + "adopt": 16441, + "adopt": 11159, + "adoptable": 36905, + "adoptdont": 19674, + "adoptdontshop": 20089, + "adopted": 12538, + "adopting": 30158, + "adoption": 11544, + "adopts": 40853, + "ador": 4992, + "ador": 9162, + "adora": 40031, + "adorable": 6298, + "adoration": 46781, + "adore": 15502, + "adored": 49233, + "adores": 30290, + "adorned": 44953, + "ados": 20079, + "adox": 32188, + "adp": 44426, + "adr": 46189, + "adren": 24204, + "adrenaline": 35552, + "adri": 5935, + "adrian": 25012, + "adrian": 13163, + "adriana": 41363, + "adrid": 26562, + "adrien": 47469, + "adrienne": 40081, + "ads": 2485, + "adu": 16882, + "adu": 24446, + "adukone": 30511, + "adul": 7222, + "adult": 42209, + "adult": 7115, + "adulthood": 40964, + "adults": 9391, + "adv": 1647, + "adv": 21018, + "advan": 33411, + "advance": 27291, + "advance": 7022, + "advanced": 7465, + "advancement": 35437, + "advances": 15852, + "advancing": 21355, + "advani": 48189, + "advant": 7017, + "advantage": 8573, + "advantaged": 38361, + "advantages": 23506, + "adven": 41670, + "advent": 3071, + "advent": 15199, + "adventcalendar": 43492, + "adventur": 
29627, + "adventure": 17251, + "adventure": 4377, + "adventurer": 48098, + "adventures": 7941, + "adventurous": 31179, + "adver": 4806, + "adverse": 30348, + "adversity": 32516, + "advert": 19080, + "adverti": 5682, + "advertise": 31473, + "advertised": 38987, + "advertisement": 18713, + "advertiser": 41829, + "advertisers": 45472, + "advertising": 8158, + "adverts": 44306, + "advice": 4973, + "advis": 4634, + "advise": 25962, + "advised": 23196, + "adviser": 20367, + "advisers": 40984, + "advises": 42761, + "advising": 39648, + "advisor": 12380, + "advisors": 23197, + "advisory": 10224, + "advoc": 6657, + "advocacy": 14443, + "advocate": 12044, + "advocates": 17757, + "adwords": 48343, + "ady": 41446, + "ady": 8781, + "ae": 5548, + "ae": 4542, + "aea": 37048, + "aed": 26912, + "aege": 42304, + "ael": 41533, + "ael": 43340, + "aen": 43085, + "aer": 10195, + "aeri": 27685, + "aerial": 44866, + "aerial": 12440, + "aero": 10196, + "aero": 25026, + "aerob": 42824, + "aeron": 37286, + "aeronau": 42816, + "aerop": 27735, + "aerosmith": 43253, + "aerospace": 20530, + "aes": 10617, + "aes": 35677, + "aest": 40694, + "aesthe": 21181, + "aesthetic": 16179, + "aesthetics": 29295, + "aew": 47108, + "af": 702, + "af": 4391, + "afa": 24953, + "afan": 47474, + "afar": 41637, + "afar": 37866, + "afb": 27022, + "afc": 29742, + "afc": 6571, + "afcb": 44276, + "afcon": 30019, + "afd": 44626, + "afe": 30487, + "afe": 13912, + "afer": 44707, + "aff": 8849, + "aff": 14864, + "affair": 13998, + "affairs": 9830, + "affe": 4556, + "affect": 11361, + "affected": 9715, + "affecting": 18448, + "affection": 33780, + "affection": 28381, + "affectionate": 42578, + "affects": 17285, + "affili": 12120, + "affiliate": 18652, + "affiliated": 37540, + "affiliation": 48377, + "affinity": 41451, + "affir": 25343, + "affirm": 42711, + "affirm": 48625, + "affirmation": 47495, + "affl": 34036, + "affleck": 35584, + "afford": 7951, + "afford": 13223, + "affordability": 44828, + "affordable": 43944, + "affordable": 8926, + "afg": 33994, + "afgh": 9029, + "afghan": 15919, + "afghanistan": 9836, + "afi": 24074, + "afi": 31958, + "afil": 27209, + "afire": 42010, + "afirst": 38601, + "afl": 15132, + "afl": 14356, + "aflo": 41959, + "afm": 38385, + "afootball": 41694, + "afor": 43102, + "afore": 41468, + "afp": 18311, + "afraid": 9474, + "afri": 13888, + "afric": 2136, + "africa": 3093, + "african": 17471, + "african": 4736, + "africans": 26534, + "afridi": 37651, + "afrika": 45833, + "afrin": 45586, + "afro": 16267, + "afro": 21795, + "afs": 48960, + "aft": 22693, + "after": 2278, + "after": 953, + "afterdark": 48966, + "afterlife": 46790, + "aftermath": 20958, + "afterno": 22330, + "afternoon": 39035, + "afternoon": 2716, + "afternoons": 31631, + "afterparty": 35305, + "afterwards": 23911, + "ag": 602, + "ag": 5241, + "aga": 1050, + "aga": 4654, + "again": 1495, + "against": 23838, + "against": 1601, + "agame": 46943, + "agan": 42946, + "agan": 9178, + "agar": 13199, + "agar": 17544, + "agarwal": 43117, + "agas": 20430, + "agate": 25454, + "agatha": 43896, + "agave": 42671, + "agawa": 39433, + "agazine": 44942, + "age": 4758, + "age": 805, + "aged": 3889, + "ageing": 25349, + "agen": 10101, + "agen": 43696, + "agencies": 13887, + "agency": 44885, + "agency": 6270, + "agend": 48653, + "agenda": 8728, + "agent": 21210, + "agent": 6576, + "agents": 10199, + "agentsof": 37074, + "agentsofshield": 38801, + "ager": 44847, + "ager": 10443, + "agers": 22123, + "ages": 2321, + "agg": 45482, + "aggarwal": 39386, + "agger": 27836, + "aggi": 
36844, + "aggie": 44244, + "aggie": 37618, + "aggies": 31047, + "aggio": 36685, + "aggrav": 35203, + "aggre": 10426, + "aggreg": 41968, + "aggregate": 41318, + "aggression": 28900, + "aggressive": 16295, + "aggressively": 48667, + "agh": 17917, + "agh": 14402, + "aghan": 31276, + "agi": 24036, + "agi": 17645, + "agic": 37652, + "agile": 16276, + "agility": 32161, + "aging": 4336, + "agio": 41746, + "agirl": 35469, + "agle": 37035, + "agle": 16702, + "agles": 36374, + "agles": 22679, + "aglia": 46912, + "agm": 19162, + "agn": 36474, + "agna": 43626, + "agne": 29374, + "agne": 48303, + "agnes": 26213, + "agno": 41540, + "ago": 6276, + "ago": 1468, + "agomez": 27127, + "agon": 26775, + "agon": 14901, + "agony": 36977, + "agor": 38920, + "agos": 32657, + "agov": 34227, + "agp": 46048, + "agr": 36639, + "agra": 26660, + "agra": 29830, + "agram": 2447, + "agre": 3180, + "agreat": 37594, + "agree": 5953, + "agreed": 12774, + "agreeing": 40720, + "agreement": 8286, + "agreements": 25865, + "agrees": 17854, + "agri": 20527, + "agri": 30326, + "agricul": 7234, + "agricultural": 15440, + "agriculture": 9720, + "agro": 33178, + "agro": 44589, + "agron": 41314, + "agroup": 40099, + "ags": 16926, + "agt": 39681, + "agu": 3922, + "agu": 36544, + "agua": 18482, + "aguchi": 49206, + "ague": 2095, + "aguero": 42964, + "agues": 7000, + "aguil": 27946, + "aguilar": 44715, + "ah": 1772, + "ah": 1288, + "aha": 12082, + "aha": 8429, + "ahah": 38661, + "ahaha": 32423, + "ahahaha": 42620, + "aham": 36036, + "ahan": 45061, + "ahan": 19255, + "ahar": 31038, + "ahar": 38760, + "ahe": 27688, + "ahead": 3158, + "ahem": 39995, + "ahh": 13152, + "ahhh": 14769, + "ahhhh": 21054, + "ahhhhh": 36392, + "ahi": 45349, + "ahi": 24154, + "ahl": 30433, + "ahmad": 32167, + "ahmad": 16902, + "ahmadi": 38656, + "ahmadiyya": 44865, + "ahmed": 19491, + "ahmed": 12081, + "ahmedabad": 26966, + "ahn": 33405, + "aho": 28114, + "aho": 38444, + "ahora": 43113, + "ahouse": 33197, + "ahoy": 38652, + "ahs": 16937, + "ahu": 11908, + "ahu": 16515, + "ai": 2014, + "ai": 2215, + "aia": 27046, + "aib": 34780, + "aic": 29454, + "aid": 13723, + "aid": 5182, + "aida": 33830, + "aidan": 48814, + "aidan": 26945, + "aide": 31558, + "aide": 9746, + "aided": 48707, + "aiden": 40020, + "aides": 49082, + "aids": 11759, + "aig": 27295, + "aig": 46989, + "aii": 22478, + "aik": 42575, + "aiken": 46342, + "ail": 1457, + "ail": 9154, + "ailed": 38919, + "ailing": 29999, + "ails": 27024, + "aim": 6787, + "aim": 11255, + "aime": 39872, + "aimed": 20247, + "aimee": 36318, + "aiming": 21768, + "aimo": 36706, + "aims": 13326, + "ain": 8326, + "ain": 2210, + "aine": 48983, + "aine": 17634, + "ains": 27621, + "aint": 29543, + "aint": 13099, + "ainted": 39933, + "aioli": 43949, + "air": 1281, + "air": 1922, + "aira": 35085, + "aira": 46444, + "airasia": 48020, + "airbnb": 23098, + "airborne": 22755, + "airbus": 15324, + "aircraft": 7706, + "airdrop": 38434, + "aire": 7682, + "aired": 21938, + "aires": 17034, + "airfield": 40525, + "airforce": 23511, + "airing": 20453, + "airline": 14847, + "airlines": 8929, + "airmen": 44499, + "airplane": 16451, + "airplanes": 33319, + "airplay": 47024, + "airpollution": 47362, + "airport": 48337, + "airport": 3259, + "airports": 21543, + "airs": 18539, + "airshow": 27139, + "airsoft": 30134, + "airspace": 49280, + "airstrikes": 37220, + "airtel": 34784, + "airtime": 46617, + "airwaves": 43910, + "airways": 14299, + "airy": 44453, + "ais": 7616, + "ais": 11393, + "aise": 30505, + "aish": 21946, + "aisha": 40211, + "aishwar": 29687, + 
"aishwarya": 44019, + "aisle": 26917, + "ait": 25613, + "ait": 40814, + "aj": 3990, + "aj": 6342, + "aja": 42343, + "aja": 19633, + "ajax": 21933, + "ajay": 22494, + "ajay": 28726, + "ajaydevgn": 35515, + "aje": 48818, + "aje": 33315, + "ajes": 38791, + "aji": 26102, + "aji": 21153, + "ajit": 42261, + "ajith": 24118, + "ajo": 26958, + "aju": 36855, + "ak": 819, + "ak": 1196, + "aka": 19154, + "aka": 3412, + "akaif": 45736, + "akan": 43678, + "akan": 38244, + "akapoor": 40064, + "akarta": 48603, + "akb": 41962, + "akbar": 27180, + "ake": 10558, + "ake": 5776, + "aked": 6115, + "aker": 14245, + "aker": 3074, + "akers": 5788, + "akes": 4764, + "akest": 46679, + "akh": 14821, + "akh": 30660, + "akhan": 28158, + "akhi": 41660, + "akhilesh": 48495, + "akhtar": 45458, + "aki": 18173, + "aki": 6592, + "akin": 24630, + "akin": 13601, + "aking": 1809, + "akins": 48568, + "akira": 34001, + "akis": 27732, + "akistan": 46221, + "akley": 39908, + "ako": 44027, + "ako": 14541, + "akon": 47105, + "akos": 44659, + "akrish": 37434, + "akron": 26115, + "aks": 2953, + "aksh": 28226, + "akshay": 21483, + "akshay": 38914, + "akshaykumar": 23624, + "akshi": 42634, + "aku": 18151, + "aku": 20815, + "aky": 11977, + "al": 526, + "al": 566, + "ala": 12783, + "ala": 3449, + "alab": 6365, + "alabam": 45880, + "alabama": 8422, + "alach": 24622, + "alad": 23074, + "aladdin": 29951, + "alai": 47072, + "alain": 28999, + "alam": 16612, + "alam": 16012, + "alamo": 41922, + "alamo": 34632, + "alan": 9563, + "alan": 5773, + "alana": 43405, + "aland": 34304, + "aland": 6819, + "alar": 34333, + "alarm": 11321, + "alarming": 37209, + "alarms": 31236, + "alarts": 31422, + "alas": 7276, + "alas": 22412, + "alaska": 9562, + "alaskan": 33898, + "alastair": 42062, + "alay": 30289, + "alay": 36450, + "alaya": 36397, + "alb": 45248, + "alba": 25254, + "alban": 10882, + "albania": 29170, + "albanian": 47721, + "albans": 44119, + "albany": 17359, + "albat": 42797, + "albeit": 38984, + "alber": 6413, + "albert": 34174, + "albert": 9507, + "alberta": 11048, + "alberto": 22714, + "albi": 18512, + "albino": 48062, + "albion": 24071, + "albu": 2216, + "album": 40712, + "album": 2431, + "albums": 10705, + "albuquerque": 31079, + "alcat": 35361, + "alche": 37909, + "alchemist": 38913, + "alchemy": 39501, + "alco": 6848, + "alco": 45446, + "alcohol": 9426, + "alcoholic": 25098, + "ald": 4539, + "ald": 2928, + "alda": 46440, + "alde": 33114, + "alden": 17155, + "alden": 27710, + "aldenrichards": 20051, + "alder": 18220, + "alder": 46571, + "aldi": 23204, + "aldo": 9933, + "aldridge": 38084, + "alds": 14285, + "aldu": 6505, + "aldub": 10532, + "aldub": 15247, + "ale": 1440, + "ale": 1336, + "alea": 26518, + "aleague": 38909, + "alec": 29804, + "alec": 19954, + "alecoscino": 47948, + "aled": 4970, + "alee": 24515, + "alej": 23440, + "alejandro": 32950, + "alek": 26906, + "alek": 43310, + "aleksand": 48429, + "alem": 11825, + "aleppo": 19258, + "aler": 25674, + "aler": 27335, + "alert": 4662, + "alerts": 22144, + "ales": 44171, + "ales": 5962, + "aless": 21864, + "alessandro": 37344, + "alestine": 31945, + "alex": 2959, + "alex": 4134, + "alexa": 16273, + "alexand": 10696, + "alexander": 25527, + "alexander": 7563, + "alexandra": 19054, + "alexandre": 35711, + "alexandria": 21171, + "alexis": 35023, + "alexis": 14243, + "aley": 21635, + "alf": 27098, + "alfa": 23482, + "alfar": 38870, + "alfie": 28598, + "alfon": 31947, + "alfonso": 41784, + "alfre": 20982, + "alfred": 16553, + "alfredo": 32291, + "algae": 25654, + "algar": 36291, + "algarve": 
40290, + "alge": 24336, + "algebra": 33694, + "alger": 18568, + "algeria": 25257, + "algon": 33007, + "algori": 14912, + "algorithm": 23295, + "algorithms": 26039, + "alham": 23352, + "alhamdulil": 35129, + "alhamdulillah": 38982, + "ali": 835, + "ali": 3558, + "alia": 2492, + "aliaa": 36468, + "alian": 3464, + "alias": 40026, + "alibaba": 39231, + "alic": 25265, + "alice": 23759, + "alice": 9192, + "alici": 31630, + "alicia": 20914, + "alie": 8697, + "alien": 22846, + "alien": 9639, + "aliens": 14883, + "alier": 39493, + "alies": 38086, + "alife": 41347, + "alife": 21100, + "alig": 21272, + "alight": 36157, + "align": 31160, + "aligned": 29292, + "alignment": 27267, + "alik": 31141, + "alike": 12665, + "alim": 42075, + "alin": 42746, + "alin": 40063, + "alina": 39529, + "aline": 21799, + "aling": 5169, + "alion": 19049, + "alis": 21308, + "alis": 20114, + "alisa": 38918, + "alisation": 42143, + "alise": 36718, + "alised": 25099, + "alism": 5607, + "alison": 28653, + "alison": 16970, + "alist": 44900, + "alist": 3320, + "alistair": 40551, + "alistic": 22302, + "alists": 5653, + "alit": 45566, + "alities": 27925, + "ality": 1694, + "alive": 40467, + "alive": 4716, + "aliz": 30979, + "alization": 8026, + "alize": 10268, + "alized": 6141, + "alizer": 38922, + "alizes": 26181, + "alizing": 13023, + "alk": 30246, + "alk": 21577, + "alkal": 33450, + "alkaline": 39210, + "all": 813, + "all": 615, + "alla": 13884, + "alla": 14000, + "allabout": 43996, + "allah": 6378, + "allan": 36552, + "allan": 15404, + "allblacks": 47728, + "allday": 35862, + "alle": 4870, + "alle": 29478, + "alled": 7379, + "alleg": 7456, + "allegations": 16992, + "alleged": 12133, + "allegedly": 14177, + "alleges": 45051, + "allegh": 41479, + "allegheny": 47851, + "allegi": 28832, + "allegiance": 30955, + "allen": 16712, + "allen": 6386, + "allenge": 31387, + "aller": 10116, + "aller": 30630, + "allergic": 28809, + "allergies": 28247, + "allergy": 24408, + "allery": 32542, + "alles": 43354, + "allevi": 31682, + "alleviate": 44799, + "alley": 36205, + "alley": 10329, + "allez": 49137, + "alli": 4123, + "alli": 15268, + "alliance": 45404, + "alliance": 8945, + "alliances": 48403, + "allianz": 45740, + "allie": 25040, + "allied": 20045, + "allies": 17277, + "alligator": 28574, + "allin": 45007, + "allin": 22395, + "alline": 48182, + "alling": 2992, + "allis": 45309, + "allison": 34602, + "allison": 16578, + "allman": 42611, + "allo": 8107, + "allo": 18389, + "allocated": 42716, + "allocation": 35139, + "allon": 46693, + "allot": 26363, + "allotment": 33750, + "allow": 5645, + "allow": 6722, + "allowance": 35696, + "allowed": 7885, + "allowing": 12458, + "allows": 9966, + "alloy": 22467, + "alls": 1997, + "allstar": 31247, + "allstar": 22974, + "allstars": 31198, + "allthe": 29253, + "allu": 20157, + "alluarjun": 39333, + "allure": 41814, + "ally": 7461, + "ally": 769, + "alm": 28303, + "alma": 32933, + "alma": 18337, + "alman": 29394, + "almanac": 41268, + "almighty": 21898, + "almond": 15646, + "almonds": 30468, + "almost": 47534, + "almost": 2671, + "aln": 47203, + "alo": 3435, + "alo": 6183, + "aloe": 30728, + "alog": 15813, + "alogue": 9101, + "aloha": 23160, + "aloils": 49002, + "alom": 22236, + "alon": 14097, + "alon": 42846, + "alone": 4702, + "along": 8300, + "along": 2528, + "alongside": 8646, + "alonso": 25704, + "aloo": 46187, + "alore": 14323, + "alot": 16945, + "alou": 43180, + "aloud": 30028, + "alove": 46669, + "alove": 37045, + "alp": 32020, + "alp": 39342, + "alpac": 30128, + "alpaca": 42561, + "alph": 6720, + 
"alpha": 11807, + "alpha": 8624, + "alphabe": 45796, + "alphabet": 22335, + "alphon": 37865, + "alpine": 17055, + "alps": 18191, + "already": 2426, + "alright": 10866, + "als": 23982, + "als": 938, + "alsace": 49388, + "also": 1446, + "alt": 9995, + "alt": 10006, + "alta": 24470, + "alta": 25378, + "altaf": 47342, + "altam": 45624, + "altar": 16385, + "alter": 4949, + "alter": 21393, + "altered": 25201, + "altern": 47463, + "alternate": 15926, + "alternati": 16699, + "alternative": 37327, + "alternative": 8248, + "alternatives": 25041, + "alth": 23463, + "alth": 5863, + "although": 9421, + "alti": 35531, + "alties": 17276, + "altitude": 23241, + "altman": 48100, + "alto": 35053, + "alto": 17518, + "altogether": 45689, + "alton": 41331, + "alton": 36550, + "altrin": 38458, + "altrincham": 44718, + "alty": 5546, + "alu": 4776, + "alu": 27991, + "alum": 5404, + "alum": 10553, + "alumin": 14563, + "alumini": 22908, + "aluminium": 23631, + "aluminum": 15251, + "alumna": 30313, + "alumni": 6646, + "alumnus": 23633, + "alums": 30155, + "alv": 20928, + "alvar": 25196, + "alvarez": 26924, + "alvaro": 41941, + "alves": 38547, + "alvin": 27023, + "alway": 14046, + "alway": 43764, + "always": 24997, + "always": 1466, + "alwx": 32768, + "aly": 6468, + "aly": 12910, + "alyn": 49150, + "alyss": 29490, + "alyssa": 18898, + "alz": 12936, + "alz": 41128, + "alzheim": 15212, + "alzheimer": 21151, + "alzheimers": 34592, + "am": 548, + "am": 687, + "ama": 18206, + "ama": 1696, + "amad": 45095, + "amade": 37366, + "amag": 32049, + "amal": 15315, + "amal": 36753, + "aman": 19890, + "aman": 10110, + "amand": 14560, + "amanda": 10036, + "amar": 6424, + "amar": 19607, + "amara": 48522, + "amari": 42565, + "amarillo": 40449, + "amarine": 45591, + "amarketing": 30788, + "amas": 22716, + "amas": 15667, + "amat": 38664, + "amat": 25455, + "amate": 12453, + "amateur": 14287, + "amaya": 47210, + "amaz": 1185, + "amaze": 24846, + "amazed": 18944, + "amazing": 15949, + "amazing": 1370, + "amazingly": 20368, + "amazon": 13630, + "amazon": 4140, + "amb": 9042, + "amb": 16853, + "amba": 27003, + "ambani": 45967, + "ambas": 5634, + "ambassad": 5758, + "ambassador": 6795, + "ambassadors": 16832, + "ambed": 42089, + "ambedkar": 48131, + "amber": 18292, + "amber": 9986, + "ambi": 11844, + "ambient": 23447, + "ambigu": 35702, + "ambition": 20673, + "ambitions": 34152, + "ambitious": 18666, + "ambro": 17585, + "ambrose": 24253, + "ambu": 34423, + "ambul": 13944, + "ambulance": 15555, + "ambush": 40725, + "amc": 24942, + "amc": 16921, + "amd": 20845, + "ame": 3995, + "ame": 780, + "amed": 5660, + "ameen": 24229, + "amel": 31988, + "amel": 10960, + "ameli": 21599, + "amelia": 21433, + "amell": 48198, + "amen": 18716, + "amen": 12335, + "amend": 12425, + "amendment": 15019, + "amendments": 40901, + "amenities": 30096, + "ament": 27528, + "amer": 17081, + "amer": 16147, + "ameri": 40422, + "americ": 1283, + "america": 2224, + "americafirst": 43216, + "american": 8746, + "american": 2151, + "americana": 26221, + "americanair": 42538, + "americani": 39726, + "americans": 6676, + "americas": 33343, + "americas": 18142, + "ames": 5469, + "ameter": 23393, + "amethy": 30291, + "amethyst": 31485, + "amex": 46390, + "amg": 21324, + "amher": 32311, + "amherst": 39065, + "ami": 6100, + "ami": 3065, + "amic": 25824, + "amic": 21383, + "amid": 18908, + "amid": 11953, + "amide": 30952, + "amidst": 25172, + "amie": 36901, + "amig": 40294, + "amiga": 35329, + "amigo": 44991, + "amigos": 28176, + "amii": 35462, + "amiibo": 38871, + "amily": 36732, + 
"amin": 14337, + "amin": 20235, + "amina": 47531, + "amination": 30355, + "amine": 35823, + "aming": 3507, + "amino": 33464, + "amir": 26029, + "amir": 21973, + "amis": 29829, + "amish": 24958, + "amit": 15083, + "amit": 25255, + "amitabh": 48124, + "amitshah": 32374, + "aml": 43185, + "amma": 29786, + "amman": 29243, + "ammo": 33474, + "ammunition": 35060, + "amn": 24073, + "amne": 14596, + "amnesia": 41741, + "amnesty": 46330, + "amnesty": 21177, + "amo": 4833, + "amo": 11156, + "amodi": 9826, + "amon": 17492, + "amon": 24046, + "among": 12310, + "among": 4265, + "amongst": 12520, + "amoo": 26977, + "amor": 19977, + "amor": 15973, + "amore": 38937, + "amore": 22691, + "amores": 36338, + "amos": 18133, + "amoto": 25492, + "amount": 6403, + "amounts": 16747, + "amour": 29908, + "amovie": 41062, + "amp": 3521, + "amp": 6259, + "amped": 22640, + "amphi": 16379, + "amphibious": 45206, + "amphitheater": 41285, + "amphitheatre": 44039, + "ample": 34162, + "amples": 14536, + "ampli": 15647, + "amplifier": 31743, + "amplify": 45308, + "amps": 19252, + "ampton": 29410, + "ampton": 9347, + "amr": 30916, + "amreading": 16546, + "amrit": 33849, + "ams": 1396, + "amster": 9110, + "amsterdam": 9441, + "amtrak": 27855, + "amu": 11347, + "amu": 32336, + "amur": 35014, + "amura": 35487, + "amus": 36269, + "amuse": 21421, + "amuse": 44367, + "amused": 30212, + "amusement": 32570, + "amusic": 20266, + "amusing": 31789, + "amwriting": 9660, + "amy": 10547, + "amy": 5187, + "an": 514, + "an": 550, + "ana": 6588, + "ana": 1388, + "anab": 34742, + "anada": 27948, + "anag": 12115, + "anagh": 40774, + "anaheim": 23728, + "anak": 34814, + "anak": 38658, + "anal": 2785, + "analo": 34179, + "analog": 19963, + "analogue": 46031, + "analy": 4611, + "analyse": 47246, + "analyses": 39695, + "analysis": 5296, + "analyst": 14198, + "analysts": 28075, + "analytical": 34550, + "analytics": 8558, + "analyze": 28519, + "analyzing": 32107, + "anam": 29525, + "anan": 37215, + "anand": 25073, + "anand": 22083, + "anap": 41566, + "anarch": 46405, + "anarchi": 39879, + "anarchy": 27707, + "anas": 31382, + "anas": 12633, + "anast": 48902, + "anasta": 22915, + "anastasi": 36534, + "anastasia": 37975, + "anat": 10045, + "anath": 31277, + "anatom": 33759, + "anatomy": 15376, + "anc": 1124, + "anc": 17758, + "anca": 14583, + "ance": 7165, + "ance": 884, + "anced": 5071, + "ancer": 17415, + "ancers": 37296, + "ances": 3515, + "ancestor": 43904, + "ancestors": 24405, + "ancestral": 41615, + "ancestry": 30922, + "anch": 9489, + "anche": 34679, + "ancho": 26610, + "anchor": 20030, + "anchor": 13201, + "anchorage": 31950, + "anchored": 45926, + "anchors": 37830, + "anci": 4192, + "ancient": 31495, + "ancient": 5810, + "ancies": 21647, + "ancing": 7797, + "anco": 15459, + "ancy": 16282, + "ancy": 3633, + "and": 672, + "and": 537, + "anda": 2911, + "andalu": 31443, + "andco": 36302, + "ande": 26889, + "ande": 30354, + "ander": 3740, + "ander": 3935, + "anders": 10880, + "andersen": 32661, + "anderson": 26683, + "anderson": 6510, + "andes": 24052, + "andfriends": 36871, + "andhi": 21617, + "andhra": 32452, + "andi": 28870, + "andi": 14354, + "andie": 46318, + "andme": 42831, + "ando": 35950, + "ando": 5986, + "andolan": 48965, + "andon": 36488, + "andor": 45243, + "andover": 44177, + "andr": 22661, + "andra": 46795, + "andra": 21730, + "andre": 2657, + "andre": 9400, + "andrea": 10895, + "andreas": 20444, + "andrei": 42137, + "andres": 25197, + "andretti": 44291, + "andrew": 11717, + "andrew": 4847, + "andrews": 14506, + "andri": 37208, + 
"andro": 4417, + "andro": 17980, + "android": 24284, + "android": 5191, + "androidgames": 46572, + "andromeda": 42942, + "andré": 35609, + "ands": 32257, + "andthe": 22111, + "andu": 44200, + "andum": 47266, + "andy": 9447, + "andy": 2888, + "ane": 5846, + "ane": 3051, + "anec": 33965, + "anem": 41395, + "anemone": 49019, + "aneous": 48273, + "anes": 15381, + "anese": 48778, + "anesthe": 30622, + "anesthesia": 43353, + "anew": 39084, + "anew": 47341, + "anews": 20919, + "aney": 22387, + "anfield": 26993, + "ang": 883, + "ang": 2704, + "anga": 11641, + "angames": 43178, + "angan": 28264, + "angas": 46180, + "ange": 2960, + "ange": 3039, + "angel": 5029, + "angel": 5130, + "angela": 12354, + "angeles": 7382, + "angeli": 15265, + "angelic": 41038, + "angelica": 38582, + "angelina": 28890, + "angelo": 14342, + "angelou": 41328, + "angels": 7809, + "anger": 32737, + "anger": 6788, + "angerous": 39716, + "angers": 29756, + "angh": 34030, + "angi": 28003, + "angi": 24301, + "angie": 18859, + "angle": 21749, + "angle": 6946, + "angled": 32322, + "angler": 22284, + "anglers": 41608, + "angles": 18627, + "anglesey": 31850, + "anglia": 32076, + "anglic": 28322, + "anglican": 33284, + "angling": 36824, + "anglo": 39515, + "anglo": 30408, + "ango": 19090, + "angola": 36636, + "angor": 41740, + "angp": 19992, + "angry": 33910, + "angry": 9054, + "angs": 18441, + "angst": 41714, + "angu": 11209, + "angular": 43584, + "angular": 24981, + "angularjs": 48608, + "angus": 19688, + "ani": 1326, + "ani": 3624, + "ania": 9866, + "anian": 9945, + "anians": 39393, + "anic": 23113, + "anie": 26697, + "anie": 7671, + "anil": 28589, + "anil": 34619, + "anim": 2190, + "animal": 10697, + "animal": 4668, + "animalrights": 42859, + "animals": 4995, + "animate": 40076, + "animated": 13360, + "animation": 10344, + "animations": 42870, + "animator": 42591, + "anime": 23314, + "anime": 6469, + "anin": 45735, + "aning": 30972, + "anir": 27089, + "anirud": 35278, + "anirudhofficial": 45917, + "anis": 40986, + "anis": 47556, + "anism": 20947, + "anist": 16729, + "anistan": 9727, + "aniston": 47344, + "anit": 23683, + "anita": 18544, + "anium": 14794, + "anj": 22443, + "anja": 43440, + "anjali": 38834, + "anjo": 47353, + "ank": 13339, + "ank": 10029, + "anka": 45324, + "ankara": 34309, + "ankle": 14777, + "ankles": 48688, + "ann": 850, + "ann": 5424, + "anna": 13821, + "anna": 2160, + "annab": 22336, + "annabelle": 47661, + "annah": 39166, + "annah": 14327, + "annak": 41720, + "annan": 32166, + "annapolis": 34491, + "annas": 48467, + "anne": 9139, + "anne": 4083, + "anned": 27352, + "anner": 12642, + "annes": 24343, + "annette": 36821, + "annex": 42958, + "annex": 46389, + "anni": 2438, + "anni": 13728, + "annie": 37270, + "annie": 12173, + "annies": 43184, + "annihil": 32734, + "annis": 24742, + "anniv": 31399, + "anniver": 29671, + "annivers": 42836, + "anniversaire": 30882, + "anniversary": 3048, + "anno": 9901, + "anno": 26871, + "annon": 26385, + "annot": 30411, + "announ": 1806, + "announce": 3682, + "announced": 4103, + "announcement": 6932, + "announcements": 23735, + "announcer": 33626, + "announces": 6500, + "announcing": 11593, + "annoy": 45138, + "annoyed": 29863, + "annoying": 15248, + "annu": 21698, + "annual": 2906, + "annually": 23703, + "anny": 34313, + "anny": 5291, + "ano": 5617, + "ano": 2658, + "anom": 21612, + "anomaly": 46811, + "anon": 47079, + "anon": 13667, + "anonym": 38605, + "anonymous": 15036, + "anoo": 25690, + "anor": 13243, + "anor": 16596, + "anos": 20132, + "another": 29274, + "another": 1380, 
+ "anova": 24116, + "ans": 24586, + "ans": 885, + "ansari": 40748, + "ansel": 40356, + "answ": 3369, + "answe": 14391, + "answer": 4518, + "answered": 14499, + "answering": 18280, + "answers": 8692, + "ant": 1103, + "ant": 773, + "anta": 3023, + "antag": 41745, + "antal": 39355, + "antalya": 47440, + "antan": 32899, + "antarc": 21338, + "antarctic": 27077, + "antarctica": 22587, + "ante": 19311, + "ante": 9769, + "antebellum": 41683, + "antelope": 39177, + "anten": 35517, + "antenna": 26370, + "anter": 46508, + "antes": 14927, + "antgrasso": 39074, + "anth": 3737, + "anth": 29741, + "antha": 47981, + "anthe": 34167, + "anthem": 12504, + "anthi": 45261, + "anthology": 21009, + "anthony": 17477, + "anthony": 6113, + "anthro": 10019, + "anthropo": 18538, + "anthropology": 32407, + "anthus": 37639, + "anti": 3120, + "anti": 3564, + "antibio": 18954, + "antibiotic": 34387, + "antibiotics": 29499, + "antibody": 49018, + "antic": 8260, + "anticip": 11435, + "anticipate": 38280, + "anticipated": 18605, + "anticipating": 48067, + "anticipation": 26983, + "antics": 37126, + "antidote": 45476, + "antifa": 35926, + "antigua": 39910, + "antine": 17641, + "antino": 27818, + "antioxid": 23010, + "antioxidant": 37452, + "antioxidants": 34208, + "antiqu": 21745, + "antique": 46517, + "antique": 9060, + "antiques": 17365, + "antis": 19748, + "antisemitism": 36630, + "antit": 37833, + "antitrust": 49343, + "antlers": 47720, + "antly": 5265, + "anto": 16826, + "anto": 24486, + "antoine": 25188, + "anton": 5497, + "anton": 19644, + "antoni": 39958, + "antonio": 30497, + "antonio": 7842, + "antony": 30707, + "antrim": 40252, + "ants": 1589, + "antv": 47520, + "antw": 44460, + "antwer": 26970, + "antwerp": 33797, + "antz": 25684, + "anu": 8537, + "anu": 17152, + "anup": 29617, + "anus": 27084, + "anush": 22765, + "anushka": 42080, + "anushka": 39822, + "anushkasharma": 44203, + "anwar": 34261, + "anxi": 9021, + "anxiety": 11103, + "anxious": 27793, + "any": 1307, + "any": 1504, + "anya": 11173, + "anybody": 10071, + "anyi": 41632, + "anymore": 7372, + "anyone": 2302, + "anything": 3582, + "anytime": 13924, + "anyway": 8931, + "anyways": 19778, + "anywhere": 8863, + "anz": 14445, + "anz": 19425, + "anza": 14669, + "anzac": 31977, + "ao": 7313, + "ao": 5703, + "aoa": 47119, + "aoc": 31918, + "aofficial": 30840, + "aoki": 33602, + "aol": 40643, + "aon": 30928, + "aon": 48476, + "aor": 32044, + "aos": 46860, + "ap": 688, + "ap": 2728, + "apa": 36954, + "apa": 13537, + "apac": 34320, + "apache": 23921, + "apal": 38017, + "apan": 36562, + "apar": 9161, + "apark": 32528, + "apart": 6474, + "apart": 7803, + "aparthe": 25121, + "apartheid": 26597, + "apartment": 8285, + "apartments": 15791, + "aparty": 26767, + "apat": 31755, + "apathy": 18145, + "apc": 20300, + "apd": 44563, + "ape": 6098, + "ape": 2609, + "apec": 47530, + "aper": 13681, + "aper": 5858, + "apers": 15846, + "apes": 9550, + "apeu": 19040, + "apex": 41935, + "apex": 23712, + "aph": 16341, + "aph": 29491, + "apha": 47104, + "apho": 21758, + "aphra": 44147, + "api": 23342, + "api": 14674, + "apia": 44259, + "apic": 40679, + "aping": 18456, + "apink": 35725, + "apis": 37575, + "apk": 27648, + "apo": 4089, + "apo": 19758, + "apocaly": 13932, + "apocalypse": 17571, + "apocalyptic": 35675, + "apol": 5023, + "apolice": 45663, + "apolis": 9598, + "apollo": 48213, + "apollo": 11554, + "apolo": 31094, + "apolog": 25530, + "apologe": 42908, + "apologi": 14977, + "apologies": 21959, + "apologise": 39608, + "apologize": 22879, + "apologizes": 35298, + "apology": 20768, 
+ "apor": 21871, + "apore": 6679, + "apost": 20309, + "apostle": 33051, + "apostles": 48457, + "app": 882, + "app": 2231, + "appa": 4884, + "appa": 13110, + "appalach": 30523, + "appalachian": 36806, + "appalling": 44797, + "appar": 26698, + "apparatus": 37716, + "apparel": 13972, + "apparent": 23963, + "apparently": 5287, + "appe": 3748, + "appe": 45949, + "appeal": 9625, + "appealing": 25909, + "appeals": 22447, + "appear": 5544, + "appear": 9308, + "appearance": 7238, + "appearances": 17214, + "appeared": 11561, + "appearing": 18759, + "appears": 8743, + "appell": 43833, + "appen": 37201, + "appen": 26589, + "apper": 18780, + "appet": 21686, + "appeti": 24179, + "appetite": 24481, + "appetizer": 36065, + "applau": 24713, + "applaud": 42152, + "applause": 22650, + "apple": 8629, + "apple": 3055, + "applemusic": 21390, + "apples": 14032, + "appleton": 45250, + "appli": 15495, + "appliance": 33677, + "appliances": 22134, + "applic": 4235, + "applicable": 37927, + "applicants": 28035, + "application": 7241, + "applications": 7341, + "applied": 12636, + "applies": 24910, + "apply": 4356, + "applying": 17965, + "appo": 5433, + "appoint": 36190, + "appointed": 11087, + "appointment": 10890, + "appointments": 23439, + "appoints": 25132, + "apprais": 36972, + "appraisal": 46108, + "appreci": 3474, + "appreciate": 6263, + "appreciated": 9264, + "appreciates": 36573, + "appreciating": 39352, + "appreciation": 9212, + "appreciationday": 37438, + "appreciative": 45074, + "appren": 10582, + "apprentic": 15662, + "apprentice": 19122, + "apprentice": 17985, + "apprentices": 38252, + "apprenticeship": 26939, + "apprenticeships": 35425, + "appro": 2398, + "approach": 7781, + "approach": 6241, + "approached": 36499, + "approaches": 14962, + "approaching": 12164, + "appropri": 8446, + "appropriate": 10768, + "appropriately": 30383, + "appropriation": 49110, + "approval": 13549, + "approve": 19064, + "approved": 9412, + "approves": 18107, + "approx": 18266, + "approxim": 14201, + "approximately": 16128, + "apps": 7020, + "appstore": 31377, + "appt": 48112, + "appy": 34420, + "apr": 39396, + "apr": 11177, + "apra": 37027, + "apric": 25923, + "apricot": 30815, + "april": 23548, + "april": 2484, + "apro": 42712, + "apro": 49051, + "apron": 29502, + "aps": 8868, + "apse": 31843, + "apt": 17921, + "aptly": 47313, + "apu": 22166, + "apur": 36900, + "apur": 45193, + "aq": 14018, + "aq": 26862, + "aqu": 4458, + "aqua": 18613, + "aquaculture": 41885, + "aquaman": 35098, + "aquari": 37605, + "aquarium": 16814, + "aquarius": 38879, + "aquatic": 22658, + "aque": 35927, + "aque": 37268, + "aqui": 36826, + "aquino": 33796, + "ar": 516, + "ar": 625, + "ara": 24161, + "ara": 3340, + "arab": 5405, + "arab": 12028, + "arabia": 11746, + "arabian": 24663, + "arabic": 16709, + "arabs": 39155, + "arac": 47620, + "arach": 37689, + "arag": 41502, + "araj": 45142, + "arak": 23416, + "aram": 19223, + "aram": 21473, + "arama": 49066, + "aran": 20839, + "aran": 19641, + "aras": 36399, + "arat": 30856, + "arav": 35836, + "arbit": 20267, + "arbitr": 22702, + "arbitration": 34845, + "arbor": 33516, + "arbor": 24878, + "arboretum": 41719, + "arc": 4997, + "arc": 11592, + "arca": 25189, + "arca": 37612, + "arcade": 13331, + "arcadia": 38372, + "arch": 2458, + "arch": 8557, + "archa": 45619, + "archae": 10121, + "archaeological": 26163, + "archaeologists": 45035, + "archaeology": 14868, + "archan": 33359, + "archbishop": 23994, + "arche": 22474, + "archer": 21824, + "archers": 38407, + "archery": 23935, + "arches": 30771, + "archi": 4479, + 
"archie": 20557, + "archipel": 39750, + "archipelago": 43025, + "architec": 3359, + "architect": 12192, + "architects": 13290, + "architectural": 15360, + "architecture": 39038, + "architecture": 4920, + "archival": 39249, + "archive": 42257, + "archive": 10548, + "archived": 42379, + "archives": 9411, + "archy": 15643, + "arctic": 29716, + "arctic": 9138, + "ard": 3793, + "ard": 746, + "arden": 44600, + "arden": 27057, + "ardi": 23932, + "ardi": 19837, + "ardo": 35735, + "ardo": 9394, + "ards": 1654, + "ardu": 20906, + "arduino": 25398, + "are": 1076, + "are": 631, + "area": 2445, + "areas": 5429, + "arec": 18136, + "areclipse": 36030, + "ared": 5369, + "arel": 12798, + "arella": 24784, + "arelli": 48619, + "aren": 4033, + "aren": 4318, + "arena": 5463, + "arenas": 47860, + "arent": 37487, + "arer": 14857, + "arers": 33159, + "ares": 12224, + "arest": 11708, + "aret": 22247, + "areth": 47725, + "aretha": 42090, + "areyou": 37607, + "arez": 13108, + "arg": 27285, + "argent": 7812, + "argentina": 9789, + "argentine": 32582, + "argon": 40737, + "argos": 37443, + "argu": 7440, + "arguably": 30899, + "argue": 19788, + "argued": 48153, + "argues": 30045, + "arguing": 26549, + "argument": 16224, + "arguments": 24693, + "argus": 44300, + "argy": 21066, + "argyle": 36179, + "argyll": 40667, + "ari": 1221, + "ari": 3681, + "aria": 8883, + "arial": 42431, + "arian": 29980, + "arian": 6953, + "ariana": 14892, + "arianag": 23025, + "arianagrande": 23321, + "arianism": 44351, + "arians": 19104, + "arias": 22567, + "arie": 18774, + "ariel": 47959, + "ariel": 21025, + "aries": 5213, + "arif": 46621, + "arily": 12993, + "arin": 29564, + "arin": 18612, + "arina": 29271, + "arine": 29586, + "aring": 2142, + "ario": 8862, + "arios": 25392, + "aris": 15227, + "arise": 26490, + "arist": 12110, + "aristo": 25666, + "aristotle": 49156, + "arities": 31069, + "arity": 16608, + "arium": 11809, + "arius": 21482, + "ariz": 6516, + "arized": 40167, + "arizon": 28936, + "arizona": 7106, + "arjun": 24565, + "arjun": 20477, + "arjuna": 43835, + "ark": 11921, + "ark": 12010, + "arkansas": 12227, + "arkham": 36381, + "arl": 48542, + "arlington": 44940, + "arlington": 17865, + "arly": 3637, + "arm": 5671, + "arm": 4793, + "arma": 15887, + "arma": 38716, + "armad": 37897, + "armada": 34938, + "armagh": 44313, + "armani": 31314, + "armb": 37096, + "armchair": 45757, + "armed": 40471, + "armed": 8202, + "armen": 13145, + "armenia": 22008, + "armenian": 24891, + "armies": 46686, + "armin": 45481, + "arming": 19766, + "armist": 38150, + "armistice": 46765, + "armor": 16167, + "armored": 28214, + "armory": 38610, + "armour": 18503, + "armoured": 42514, + "arms": 5706, + "armstrong": 15005, + "army": 13541, + "army": 3133, + "armys": 27311, + "arn": 9348, + "arn": 37597, + "arnau": 45556, + "arne": 43509, + "arney": 35962, + "arnold": 49096, + "arnold": 13609, + "arns": 46692, + "aro": 7514, + "aro": 11551, + "aroa": 48209, + "arom": 16831, + "aroma": 40143, + "aroma": 26390, + "aromas": 47439, + "aromatherapy": 42584, + "aromatic": 39669, + "aron": 30855, + "aron": 28926, + "aroo": 47581, + "arora": 31897, + "arosa": 44264, + "arose": 44262, + "around": 35615, + "around": 1630, + "arqu": 35654, + "arquitec": 41703, + "arr": 39106, + "arr": 42489, + "arra": 32918, + "arra": 43827, + "arrahman": 44554, + "arran": 45722, + "arrang": 16711, + "arrange": 15410, + "arrange": 26311, + "arranged": 22451, + "arrangement": 23822, + "arrangements": 23792, + "arranging": 35321, + "array": 17293, + "arre": 4374, + "arrell": 28846, + "arrest": 
+ ... [vocabulary hunk continues: several thousand added lines, one BPE token-to-integer-ID pair per line, in alphabetical order from "arrested": 5845 through "califor": 3526] ...
+ "californi": 21303, + "california": 3729, + "call": 7950, + "call": 1620, + "calla": 20658, + "callahan": 43313, + "callaway": 42596, + "callback": 44764, + "calle": 47699, + "calle": 38144, + "called": 2726, + "caller": 30666, + "calli": 16338, + "callie": 36512, + "calligraphy": 27775, + "calling": 4597, + "callister": 49026, + "callme": 42449, + "callof": 41280, + "calls": 4572, + "callum": 23224, + "calm": 34990, + "calm": 7011, + "calming": 30690, + "calorie": 32679, + "calories": 18029, + "cals": 47714, + "calum": 16405, + "calvary": 40169, + "calvert": 47134, + "calves": 31857, + "calvin": 27642, + "calvin": 17345, + "caly": 10244, + "calyp": 29851, + "cam": 1004, + "cam": 5982, + "camar": 31991, + "camber": 44362, + "cambo": 14662, + "cambodia": 17347, + "cambridge": 24651, + "cambridge": 9334, + "cambridgeshire": 46139, + "camden": 38735, + "camden": 17984, + "came": 1986, + "camel": 27005, + "camel": 21914, + "camels": 41357, + "cameo": 19492, + "camer": 4961, + "camera": 3934, + "cameraman": 43347, + "cameras": 12172, + "camero": 20320, + "cameron": 19634, + "cameron": 8057, + "camerondallas": 40587, + "cameroon": 24061, + "camil": 37745, + "camila": 19919, + "camilla": 38897, + "camille": 26741, + "camino": 28529, + "camo": 28702, + "camo": 19716, + "camogie": 39547, + "camou": 23588, + "camoufla": 23667, + "camouflage": 29049, + "camp": 2854, + "camp": 2877, + "campa": 2793, + "campaig": 9448, + "campaign": 44524, + "campaign": 3193, + "campaigner": 46364, + "campaigners": 40272, + "campaigning": 19594, + "campaigns": 15669, + "campan": 31765, + "campbell": 29094, + "campbell": 8806, + "campe": 16672, + "campeon": 49109, + "campeones": 30105, + "camper": 41914, + "camper": 24522, + "campers": 26619, + "campfire": 32530, + "campground": 46969, + "camping": 9982, + "campo": 27600, + "campos": 48077, + "camps": 12806, + "campsite": 44243, + "campu": 19687, + "campus": 4560, + "campuses": 31895, + "camra": 46155, + "camry": 46472, + "cams": 32590, + "can": 950, + "can": 753, + "cana": 28341, + "canad": 13193, + "canada": 2698, + "canadaday": 39800, + "canadi": 4329, + "canadian": 22160, + "canadian": 5255, + "canadians": 18989, + "canadiens": 40932, + "canal": 28585, + "canal": 9535, + "canals": 38483, + "canaria": 47117, + "canary": 40409, + "canary": 24523, + "canberra": 16719, + "canc": 43189, + "cancel": 12026, + "cancel": 21546, + "canceled": 25874, + "cancell": 28027, + "cancellation": 38765, + "cancelled": 13270, + "cancels": 34089, + "cancer": 12690, + "cancer": 3148, + "cancers": 33201, + "cancun": 34721, + "cand": 4986, + "candace": 45623, + "candel": 47834, + "candi": 6034, + "candice": 30024, + "candid": 7884, + "candid": 19206, + "candidacy": 46248, + "candidate": 6475, + "candidates": 8619, + "candied": 43982, + "candies": 46305, + "candle": 18995, + "candle": 12674, + "candlelight": 34724, + "candles": 15472, + "candy": 20741, + "candy": 6417, + "cane": 23644, + "cane": 14716, + "canelo": 43210, + "canes": 21902, + "cani": 35592, + "canine": 27380, + "cann": 4139, + "cann": 23709, + "cannab": 7577, + "cannabis": 31837, + "cannabis": 8861, + "canne": 44252, + "canned": 27290, + "cannes": 13773, + "canni": 26389, + "canning": 38621, + "cannon": 28771, + "cannon": 15661, + "cannons": 46269, + "cannot": 4785, + "canny": 26986, + "cano": 31668, + "cano": 25937, + "canoe": 23503, + "canola": 40389, + "canon": 17749, + "canon": 9310, + "canopy": 26061, + "cans": 13707, + "cant": 13395, + "cant": 5784, + "canteen": 39230, + "canter": 19301, + "canterbury": 22271, + 
"canti": 42845, + "cantina": 47472, + "canton": 37735, + "canton": 25363, + "cantore": 41769, + "cantwait": 33760, + "canu": 20171, + "canucks": 24321, + "canv": 30714, + "canvas": 22441, + "canvas": 7483, + "canvass": 40054, + "canvassing": 33783, + "cany": 47674, + "canyon": 41246, + "canyon": 9755, + "cao": 29207, + "cap": 1289, + "cap": 3938, + "capabilities": 19512, + "capability": 25885, + "capable": 14742, + "capac": 24665, + "capacity": 8970, + "capcom": 28342, + "cape": 10288, + "cape": 6631, + "capecod": 41339, + "capes": 38785, + "capetown": 20059, + "capit": 6889, + "capita": 41833, + "capital": 11198, + "capital": 5439, + "capitalism": 20068, + "capitalist": 37015, + "capitals": 29579, + "capitol": 43880, + "capitol": 11375, + "capo": 45477, + "capp": 16718, + "capped": 24659, + "capping": 42656, + "cappuccino": 37402, + "capri": 48699, + "capri": 30982, + "capric": 28667, + "capricorn": 46314, + "caps": 23185, + "capsu": 15608, + "capsul": 40341, + "capsule": 20627, + "capsules": 32870, + "capt": 45815, + "capt": 17369, + "captain": 14958, + "captain": 4621, + "captainamerica": 46229, + "captainmarvel": 48492, + "captains": 18706, + "caption": 11327, + "captions": 41878, + "captiv": 19776, + "captivating": 30580, + "captive": 29038, + "captivity": 41141, + "capture": 8818, + "captured": 8020, + "captures": 15305, + "capturing": 19548, + "capu": 44241, + "car": 811, + "car": 1615, + "cara": 20016, + "carab": 32251, + "carac": 30029, + "caracas": 45854, + "caramel": 14788, + "carameli": 41739, + "caramelized": 43854, + "carat": 32981, + "carav": 13814, + "caravan": 18566, + "carb": 21379, + "carbo": 43235, + "carbon": 14038, + "carbon": 7549, + "carbs": 29313, + "carcin": 31587, + "carcinoma": 46810, + "card": 10793, + "card": 2601, + "cardam": 49008, + "cardboard": 19845, + "cardi": 6211, + "cardi": 29677, + "cardiac": 21256, + "cardiff": 22488, + "cardiff": 9781, + "cardigan": 30501, + "cardin": 8457, + "cardinal": 46310, + "cardinal": 16472, + "cardinals": 12837, + "cardio": 15003, + "cardio": 23455, + "cardiology": 37276, + "cardiovascular": 29291, + "cardo": 40625, + "cards": 4094, + "care": 2050, + "care": 1776, + "cared": 27675, + "career": 20609, + "career": 3061, + "careers": 10090, + "careful": 11999, + "carefully": 15789, + "caregi": 22042, + "caregiver": 46372, + "caregivers": 35909, + "careless": 47325, + "carers": 26484, + "cares": 10968, + "caretaker": 48037, + "carey": 14895, + "cargo": 12490, + "cari": 18497, + "cari": 37273, + "carib": 9757, + "caribbean": 10368, + "caribou": 42135, + "caric": 25337, + "caricature": 38857, + "carina": 44357, + "caring": 13083, + "carl": 8273, + "carl": 9482, + "carla": 25552, + "carleton": 46496, + "carlin": 47559, + "carlisle": 23276, + "carlo": 17861, + "carlo": 15266, + "carlos": 9538, + "carlow": 44745, + "carls": 39635, + "carlson": 24114, + "carlton": 18934, + "carly": 23166, + "carly": 22689, + "carlyle": 46555, + "carmel": 30757, + "carmel": 25601, + "carmen": 41427, + "carmen": 18834, + "carmichael": 41657, + "carn": 21597, + "carnage": 31385, + "carnation": 44577, + "carnaval": 47238, + "carne": 17053, + "carne": 42885, + "carnegie": 25287, + "carney": 34194, + "carni": 8438, + "carnival": 36708, + "carnival": 10577, + "caro": 30317, + "caro": 29344, + "carol": 4242, + "carol": 11489, + "carole": 31955, + "carolin": 26418, + "carolina": 7027, + "caroline": 31064, + "caroline": 12641, + "carols": 33269, + "carolyn": 25825, + "carou": 32224, + "carousel": 36665, + "carp": 26085, + "carpen": 15584, + "carpenter": 18475, 
+ "carpet": 6922, + "carpets": 34612, + "carr": 26951, + "carr": 17136, + "carra": 32332, + "carre": 31114, + "carrera": 32952, + "carri": 4739, + "carriage": 47885, + "carriage": 21087, + "carrick": 44052, + "carrie": 30334, + "carrie": 15848, + "carried": 12960, + "carrier": 12308, + "carriers": 26865, + "carries": 17982, + "carrieunderwood": 47338, + "carrington": 48759, + "carroll": 41911, + "carroll": 14893, + "carrot": 15435, + "carrots": 19299, + "carry": 31863, + "carry": 6998, + "carrying": 9920, + "cars": 3346, + "carsforsale": 45222, + "carson": 41766, + "carson": 13171, + "cart": 27705, + "cart": 13065, + "cartag": 45042, + "cartagena": 47157, + "carte": 44949, + "cartel": 30529, + "carter": 27330, + "carter": 7260, + "cartier": 32951, + "carto": 5487, + "carton": 41812, + "cartoon": 33082, + "cartoon": 7651, + "cartoonist": 30793, + "cartoons": 17673, + "cartri": 47084, + "cartridge": 29432, + "cartridges": 49249, + "carts": 27581, + "cartunesapp": 32888, + "caruso": 45192, + "carve": 40152, + "carved": 15127, + "carver": 28850, + "carving": 19428, + "carvings": 48123, + "cary": 22844, + "cas": 1671, + "cas": 13831, + "casa": 14643, + "casablanc": 36572, + "casablanca": 41950, + "casc": 36714, + "casca": 43296, + "cascade": 29065, + "cascades": 46454, + "case": 17698, + "case": 2068, + "cases": 6888, + "casey": 24899, + "casey": 12836, + "cash": 11050, + "cash": 5131, + "cashback": 36368, + "cashe": 32233, + "cashew": 39531, + "cashi": 29517, + "cashier": 34547, + "cashmere": 34566, + "casi": 38350, + "casino": 10473, + "casio": 32261, + "cask": 26299, + "casm": 35198, + "casper": 35892, + "cass": 22556, + "cassandra": 35289, + "casser": 31093, + "casserole": 36045, + "cassette": 19717, + "cassi": 14942, + "cassidy": 21757, + "cassie": 29323, + "cassini": 46554, + "cast": 2509, + "cast": 1970, + "caste": 32693, + "casted": 33838, + "castel": 43306, + "castell": 31792, + "caster": 32101, + "caster": 8449, + "casters": 29721, + "castic": 47737, + "castillo": 30813, + "casting": 7087, + "castle": 12496, + "castle": 3540, + "castles": 24766, + "castro": 16950, + "casts": 10595, + "casu": 15345, + "casual": 10129, + "casually": 18840, + "casualties": 30244, + "casualty": 31222, + "cat": 1481, + "cat": 2368, + "cata": 42279, + "catal": 12792, + "catalan": 30532, + "catalina": 36576, + "catalo": 34740, + "catalog": 20036, + "catalogue": 20985, + "catalonia": 27039, + "catalunya": 44132, + "cataly": 15894, + "catalyst": 25387, + "catan": 45893, + "catap": 39514, + "catar": 35801, + "catastro": 22736, + "catastrophe": 41422, + "catastrophic": 34448, + "catch": 18901, + "catch": 3042, + "catcher": 15965, + "catchers": 39060, + "catches": 17213, + "catching": 8617, + "catchy": 37114, + "catday": 32243, + "cate": 6357, + "cate": 24510, + "cated": 31823, + "categor": 17006, + "categori": 40117, + "categories": 19971, + "category": 9432, + "cater": 16634, + "cater": 38101, + "catering": 16697, + "caterpillar": 27111, + "catfish": 26077, + "cath": 9196, + "cath": 30811, + "cathar": 43784, + "cathe": 7174, + "cathedr": 46370, + "cathedral": 7865, + "catherine": 35035, + "catherine": 12339, + "catho": 7595, + "cathol": 16315, + "catholic": 20382, + "catholic": 7757, + "catholics": 36808, + "cathy": 40326, + "cathy": 22731, + "cation": 21367, + "cato": 33558, + "cats": 38800, + "cats": 3989, + "catsofinstagram": 39901, + "catsoftwitter": 17273, + "catt": 37339, + "cattle": 48799, + "cattle": 13644, + "caturday": 20892, + "catwalk": 36565, + "catwoman": 47251, + "cau": 1121, + "cau": 45529, + 
"caucus": 18847, + "caught": 4520, + "caul": 23460, + "cauley": 41682, + "caulfield": 44906, + "cauli": 20123, + "cauliflower": 23802, + "cause": 18982, + "cause": 1394, + "caused": 8940, + "causes": 9775, + "causeway": 35034, + "causing": 10779, + "caution": 15656, + "cautious": 36579, + "cav": 4942, + "cav": 45935, + "cava": 48682, + "caval": 24537, + "cavali": 20783, + "cavalier": 44488, + "cavaliers": 30194, + "cavalry": 32467, + "cave": 25441, + "cave": 9654, + "cavendish": 42945, + "caver": 41487, + "caves": 22096, + "cavi": 27360, + "caviar": 31228, + "cavill": 40492, + "cavity": 43156, + "cavs": 16800, + "caw": 38405, + "caw": 43804, + "cawx": 26739, + "cay": 11876, + "cay": 37399, + "cayenne": 43650, + "cayman": 33737, + "caz": 48451, + "cb": 4034, + "cb": 8830, + "cba": 38472, + "cbb": 31487, + "cbc": 14096, + "cbc": 14523, + "cbd": 13176, + "cbe": 43639, + "cbi": 30875, + "cbj": 35608, + "cbn": 26579, + "cbp": 46723, + "cbr": 28762, + "cbs": 16788, + "cbs": 8009, + "cc": 2976, + "cc": 2021, + "cca": 17987, + "ccc": 21856, + "ccd": 48556, + "ccg": 37755, + "cch": 21789, + "cchini": 28467, + "cci": 32942, + "cci": 8196, + "ccl": 43773, + "ccm": 40435, + "cco": 28786, + "ccot": 24950, + "ccp": 43045, + "ccs": 30400, + "cctv": 23097, + "ccu": 49023, + "cd": 4308, + "cd": 4480, + "cda": 45565, + "cdc": 41098, + "cdc": 25779, + "cdn": 8886, + "cdn": 26802, + "cdnpoli": 11645, + "cdo": 47187, + "cdp": 39624, + "cds": 20784, + "cdt": 18455, + "ce": 685, + "ce": 629, + "cea": 28355, + "cean": 34409, + "cean": 37295, + "cease": 32856, + "cease": 25499, + "ceasefire": 38291, + "cebu": 20146, + "cec": 29694, + "cec": 40029, + "cecil": 26987, + "cecil": 27169, + "cecilia": 35440, + "ced": 25634, + "ced": 2323, + "cedar": 24167, + "cedar": 13799, + "cedric": 36608, + "cee": 45966, + "cee": 15015, + "cees": 47914, + "ceil": 27275, + "ceiling": 12374, + "ceilings": 33770, + "cek": 45544, + "cel": 2269, + "cel": 7597, + "cele": 1314, + "celeb": 38862, + "celeb": 19393, + "celebr": 1372, + "celebrate": 31414, + "celebrate": 2694, + "celebrated": 9184, + "celebrates": 7564, + "celebrating": 3382, + "celebration": 4615, + "celebrations": 10825, + "celebratory": 34115, + "celebrities": 17071, + "celebrity": 23981, + "celebrity": 7320, + "celebs": 19803, + "celed": 25741, + "celer": 9621, + "celery": 30990, + "celeste": 29364, + "celesti": 29497, + "celestial": 32669, + "celi": 25567, + "celia": 44489, + "celine": 33644, + "cell": 9316, + "cell": 5533, + "cellar": 24282, + "cellars": 44976, + "cellence": 34687, + "cello": 23013, + "cellphone": 39029, + "cells": 8890, + "cellu": 16791, + "cellular": 23268, + "cels": 24021, + "celsius": 47057, + "celtic": 21897, + "celtic": 10523, + "celticfc": 38612, + "celtics": 16226, + "cem": 41435, + "ceme": 10517, + "cement": 4369, + "cements": 19448, + "cemetery": 11660, + "cen": 1306, + "cen": 30106, + "cena": 21591, + "cence": 24410, + "cency": 41259, + "cene": 30038, + "censor": 24230, + "censor": 44709, + "censored": 30951, + "censorship": 27284, + "census": 23677, + "cent": 1784, + "cent": 3662, + "centenary": 22422, + "centennial": 20895, + "center": 16651, + "center": 2119, + "centered": 24584, + "centers": 14494, + "centi": 48889, + "centime": 48687, + "centr": 2370, + "central": 13448, + "central": 3339, + "centre": 26310, + "centre": 2916, + "centred": 47925, + "centres": 19354, + "centri": 30872, + "centric": 19297, + "centro": 37178, + "cents": 11934, + "centu": 16818, + "centuri": 36816, + "centuries": 19014, + "century": 26134, + "century": 4275, + 
"ceo": 46340, + "ceo": 3559, + "ceos": 28332, + "cep": 2632, + "cep": 48714, + "ceph": 44343, + "cept": 3678, + "ception": 12346, + "cer": 1364, + "cer": 1925, + "cera": 34608, + "ceram": 10677, + "ceramic": 15112, + "ceramics": 22438, + "cere": 3984, + "cere": 22085, + "cereal": 17581, + "cereals": 48618, + "cerebral": 39073, + "ceremon": 15796, + "ceremonial": 33281, + "ceremonies": 21547, + "ceremony": 5193, + "cern": 44851, + "cers": 13638, + "cert": 27522, + "certain": 8526, + "certain": 7883, + "certainly": 10883, + "certainty": 20054, + "certi": 4888, + "certific": 9443, + "certificate": 11786, + "certificates": 25281, + "certification": 14735, + "certified": 9288, + "cerv": 25738, + "cervical": 35953, + "ces": 28715, + "ces": 1604, + "cesar": 37025, + "cesar": 28603, + "cess": 2314, + "cess": 1554, + "cessna": 36596, + "cest": 27245, + "cester": 15769, + "cester": 12718, + "cet": 14960, + "cett": 46708, + "ceu": 37457, + "cevic": 48369, + "cey": 20971, + "cf": 10189, + "cf": 11171, + "cfa": 34521, + "cfb": 32931, + "cfc": 11577, + "cfd": 46171, + "cfl": 46320, + "cfl": 22332, + "cfo": 26937, + "cfp": 40756, + "cfr": 44033, + "cfs": 32835, + "cg": 27118, + "cg": 14740, + "cgc": 38775, + "cgi": 30520, + "ch": 540, + "ch": 634, + "cha": 1587, + "cha": 4541, + "chab": 26670, + "chad": 13095, + "chad": 12923, + "chae": 9460, + "chaf": 38123, + "chag": 27989, + "chai": 31590, + "chai": 18919, + "chain": 13898, + "chain": 3946, + "chained": 34402, + "chains": 14438, + "chainsaw": 37617, + "chainz": 39687, + "chair": 4728, + "chair": 4269, + "chaired": 31664, + "chairing": 42205, + "chairman": 6901, + "chairperson": 31584, + "chairs": 12033, + "chak": 13702, + "chak": 41713, + "chakra": 38304, + "chakra": 33241, + "chal": 7397, + "chal": 30809, + "chale": 38099, + "chalet": 37907, + "chalk": 31362, + "chalk": 17846, + "chall": 2073, + "challeng": 4138, + "challenge": 29462, + "challenge": 2836, + "challenged": 17380, + "challenger": 18228, + "challengers": 46404, + "challenges": 6280, + "challenging": 11754, + "chalmers": 47955, + "cham": 1290, + "cham": 19951, + "chamber": 18983, + "chamber": 7642, + "chamberlain": 32756, + "chambers": 16501, + "chamele": 34759, + "chameleon": 41317, + "champ": 36813, + "champ": 6602, + "champag": 10283, + "champagne": 11007, + "champi": 1680, + "champion": 2643, + "champion": 3950, + "champions": 4227, + "championship": 3429, + "championships": 7047, + "championsleague": 27638, + "champs": 6240, + "chan": 1255, + "chan": 6704, + "chana": 48752, + "chanc": 13931, + "chance": 32940, + "chance": 2594, + "chancellor": 15886, + "chances": 10870, + "chand": 7126, + "chand": 41508, + "chandelier": 30570, + "chandi": 12482, + "chandigarh": 34106, + "chandler": 17595, + "chandra": 27082, + "chandra": 25348, + "chanel": 16951, + "chang": 2233, + "chang": 16461, + "change": 11608, + "change": 1799, + "changeable": 41335, + "changed": 4907, + "changer": 18406, + "changers": 35185, + "changes": 4938, + "changing": 40384, + "changing": 5621, + "changmin": 47410, + "chann": 8804, + "channel": 25837, + "channel": 3847, + "channeling": 28197, + "channels": 13961, + "channing": 37417, + "chant": 18165, + "chant": 13521, + "chanting": 32111, + "chants": 22723, + "chanyeol": 18805, + "chao": 31815, + "chaos": 10853, + "chaotic": 33501, + "chap": 3825, + "chap": 21939, + "chapel": 40859, + "chapel": 10137, + "chaplain": 38348, + "chaplin": 32545, + "chapman": 17968, + "chapp": 20634, + "chaps": 36823, + "chapter": 6014, + "chapters": 22936, + "char": 1054, + "char": 16017, + 
"chara": 35668, + "charac": 2792, + "character": 10997, + "character": 4009, + "characterdesign": 38149, + "characteri": 20920, + "characteristic": 44747, + "characteristics": 26037, + "characters": 6564, + "charan": 31851, + "charcoal": 19268, + "chard": 17524, + "chardon": 26599, + "chardonnay": 28161, + "charge": 25032, + "charge": 5948, + "chargeable": 35664, + "charged": 7916, + "charger": 13090, + "chargers": 17352, + "charges": 8962, + "charging": 12514, + "chariot": 38811, + "charis": 24449, + "charisma": 45041, + "charismatic": 37205, + "charitable": 23256, + "charities": 18493, + "charity": 20008, + "charity": 4607, + "charitytuesday": 42794, + "charl": 47736, + "charle": 10217, + "charles": 27983, + "charles": 5127, + "charleston": 15478, + "charley": 38027, + "charli": 21784, + "charli": 49392, + "charlie": 16764, + "charlie": 6393, + "charlotte": 18445, + "charlotte": 7871, + "charlottesville": 32027, + "charlton": 27048, + "charm": 10876, + "charmed": 39790, + "charming": 12177, + "charms": 21944, + "charred": 44085, + "chart": 42685, + "chart": 5053, + "charted": 27939, + "charter": 42345, + "charter": 13569, + "chartered": 31298, + "charters": 46626, + "charting": 39841, + "charts": 10728, + "chas": 10717, + "chas": 29838, + "chase": 21503, + "chase": 3859, + "chased": 30342, + "chaser": 29560, + "chasers": 34158, + "chases": 45011, + "chasing": 46909, + "chasing": 13376, + "chassis": 29188, + "chast": 42176, + "chasu": 41352, + "chat": 5355, + "chat": 2402, + "chatbots": 43994, + "chate": 30377, + "chateau": 44582, + "chateau": 23520, + "chath": 46849, + "chatham": 32030, + "chats": 13263, + "chatt": 21618, + "chattanoo": 28009, + "chattanooga": 29866, + "chatted": 34124, + "chatter": 33473, + "chatter": 41103, + "chatting": 12401, + "chatur": 33839, + "chau": 11263, + "chau": 37536, + "chauffe": 45440, + "chauhan": 46663, + "chav": 28997, + "chavez": 27480, + "chaw": 39639, + "chay": 45317, + "chaz": 47815, + "chc": 36233, + "chd": 41645, + "che": 983, + "che": 3842, + "chea": 39580, + "chead": 48358, + "cheap": 27036, + "cheap": 8678, + "cheape": 26164, + "cheaper": 17776, + "cheapest": 26640, + "cheat": 18180, + "cheated": 34285, + "cheating": 19722, + "chec": 1113, + "check": 7672, + "check": 1217, + "checked": 10387, + "checker": 45883, + "checkers": 48181, + "checking": 7441, + "checklist": 26989, + "checkout": 13101, + "checkpoint": 27531, + "checks": 13737, + "ched": 11341, + "ched": 2146, + "cheddar": 20551, + "chee": 5326, + "chee": 20944, + "cheek": 40000, + "cheek": 21227, + "cheeks": 23019, + "cheeky": 15068, + "cheer": 9733, + "cheer": 6918, + "cheered": 38111, + "cheerful": 28882, + "cheering": 14289, + "cheerleader": 29072, + "cheerleaders": 22343, + "cheerleading": 36366, + "cheers": 6562, + "chees": 15182, + "cheese": 10738, + "cheese": 4108, + "cheeseburger": 41200, + "cheesecake": 17803, + "cheeses": 36076, + "cheesy": 22093, + "cheetah": 27431, + "chef": 12137, + "chef": 4895, + "chefs": 14486, + "chek": 43745, + "chel": 3084, + "chel": 25970, + "chell": 46854, + "chelle": 30141, + "chelms": 34936, + "chelmsford": 39890, + "chelse": 19071, + "chelsea": 6031, + "chelseafc": 25927, + "chelten": 18889, + "cheltenham": 21589, + "chem": 5667, + "chem": 13698, + "chemi": 7179, + "chemical": 39376, + "chemical": 9208, + "chemicals": 17426, + "chemist": 23138, + "chemistry": 8841, + "chemo": 33095, + "chemo": 36348, + "chemotherapy": 41412, + "chemtrails": 46015, + "chen": 5907, + "chen": 8983, + "cheney": 43522, + "cheng": 32512, + "cheng": 30190, + "chenko": 
29073, + "chennai": 28948, + "chennai": 12791, + "cheon": 11498, + "cheque": 28168, + "cher": 3597, + "cher": 3466, + "cheri": 26471, + "cherish": 20053, + "cherished": 42325, + "cherno": 35376, + "chernobyl": 40554, + "chero": 19844, + "cherokee": 22860, + "cherries": 27248, + "cherry": 21470, + "cherry": 7325, + "chers": 5789, + "chery": 38478, + "cheryl": 37784, + "cheryl": 20600, + "ches": 18346, + "ches": 1910, + "chesa": 28349, + "chesapeake": 32909, + "cheshire": 17130, + "chesney": 48747, + "chess": 27170, + "chess": 8397, + "chest": 18217, + "chest": 10563, + "chester": 10466, + "chester": 3343, + "chesterfield": 32975, + "chestnut": 21834, + "chet": 9663, + "chett": 24695, + "chev": 7152, + "chev": 41145, + "chevro": 12850, + "chevrolet": 13240, + "chevron": 33792, + "chevy": 16581, + "chew": 32645, + "chew": 22642, + "chewan": 23689, + "chewbacca": 49355, + "chewing": 31486, + "chewy": 42940, + "chey": 26968, + "chey": 31208, + "cheyenne": 34805, + "chez": 49183, + "chez": 10556, + "chf": 33021, + "chfield": 41619, + "chhat": 34127, + "chhattisgarh": 44246, + "chi": 1337, + "chi": 4039, + "chia": 19147, + "chiang": 33764, + "chibi": 22306, + "chic": 2627, + "chic": 9091, + "chica": 44190, + "chicag": 16778, + "chicago": 15038, + "chicago": 3530, + "chicagof": 40638, + "chicagofire": 46576, + "chicas": 40664, + "chichester": 43823, + "chick": 3170, + "chick": 11238, + "chicken": 26322, + "chicken": 3717, + "chickens": 21658, + "chickpea": 48109, + "chicks": 17810, + "chico": 30379, + "chie": 40046, + "chie": 12388, + "chief": 16830, + "chief": 3455, + "chiefs": 11419, + "chiev": 47761, + "chiff": 27407, + "chiffon": 31817, + "chig": 42952, + "chihu": 22857, + "chihuahu": 25437, + "chihuahua": 30181, + "chik": 45455, + "chil": 1333, + "child": 4392, + "child": 2913, + "childcare": 31133, + "childhood": 34772, + "childhood": 7551, + "childish": 31939, + "childre": 2135, + "children": 11101, + "children": 2153, + "childrens": 31551, + "childrens": 21553, + "childs": 39521, + "chile": 10022, + "chilean": 33186, + "chili": 13033, + "chill": 6498, + "chill": 6382, + "chilled": 23540, + "chillen": 45160, + "chilli": 26787, + "chilli": 17067, + "chillin": 10347, + "chilling": 10179, + "chillout": 39842, + "chills": 25460, + "chilly": 14450, + "chim": 10543, + "chimney": 26821, + "chimp": 44374, + "chin": 6555, + "chin": 8979, + "china": 38943, + "china": 2817, + "chinatown": 28582, + "chine": 4013, + "chinese": 30568, + "chinese": 4271, + "ching": 34621, + "ching": 1439, + "chino": 47181, + "chino": 27440, + "chinook": 41577, + "chinson": 33786, + "chio": 19650, + "chip": 19271, + "chip": 8730, + "chipmun": 46384, + "chipot": 17702, + "chipotle": 19284, + "chipp": 39854, + "chippe": 46541, + "chipped": 39892, + "chipping": 40323, + "chips": 8855, + "chir": 15564, + "chiro": 23413, + "chiroprac": 25987, + "chiropractic": 34437, + "chis": 19920, + "chistan": 20523, + "chiswick": 47290, + "chit": 13515, + "chit": 45626, + "chita": 49184, + "chitec": 39862, + "chive": 29222, + "chives": 34921, + "chk": 47424, + "chl": 38592, + "chley": 47748, + "chlo": 10374, + "chloe": 39966, + "chloe": 13992, + "chlor": 23135, + "chman": 35835, + "chment": 20848, + "chner": 48277, + "cho": 1327, + "cho": 5150, + "choa": 43077, + "choc": 32772, + "choc": 21983, + "choco": 46285, + "choco": 32692, + "chocol": 3443, + "chocolat": 44631, + "chocolate": 29389, + "chocolate": 3820, + "chocolates": 24120, + "choi": 23749, + "choic": 35606, + "choice": 23857, + "choice": 4051, + "choices": 11016, + "choir": 9214, 
+ "choirs": 43277, + "choke": 30231, + "choked": 43521, + "choker": 39642, + "choking": 39993, + "chol": 19802, + "cholera": 45999, + "cholester": 26861, + "cholesterol": 27982, + "chom": 25151, + "chon": 20416, + "chon": 21601, + "chondri": 37379, + "chong": 26220, + "choo": 3869, + "choo": 24437, + "chool": 29578, + "chools": 41958, + "choose": 22756, + "choose": 5073, + "chooses": 29923, + "choosing": 13475, + "chop": 10458, + "chop": 16663, + "chopin": 42256, + "chopped": 22580, + "chopper": 24011, + "chopping": 35375, + "chopra": 24258, + "chops": 26321, + "chor": 7567, + "chor": 47795, + "choral": 26684, + "chord": 33005, + "chords": 36152, + "choreo": 17443, + "choreographer": 35952, + "choreography": 32749, + "chores": 40483, + "chori": 25718, + "chorizo": 30802, + "chorus": 20869, + "chos": 26559, + "chose": 11090, + "chosen": 10044, + "chou": 16960, + "chou": 42917, + "choudhary": 45503, + "chow": 20257, + "chow": 21657, + "chowder": 37886, + "chp": 35896, + "chr": 36918, + "chri": 1135, + "chris": 9907, + "chris": 2978, + "chrisbrown": 41035, + "chriss": 46745, + "chrissy": 44762, + "chrissy": 40485, + "christ": 1403, + "christ": 6703, + "christchurch": 27100, + "christen": 31956, + "christensen": 42226, + "christi": 3328, + "christi": 33213, + "christian": 11792, + "christian": 4729, + "christianity": 20000, + "christians": 14842, + "christie": 16084, + "christin": 30189, + "christina": 15925, + "christine": 42610, + "christine": 14712, + "christma": 12039, + "christmas": 18174, + "christmas": 1677, + "christmaseve": 44381, + "christmass": 44873, + "christop": 7917, + "christoph": 47844, + "christophe": 45486, + "christopher": 33349, + "christopher": 9630, + "christy": 28331, + "chro": 13207, + "chromatic": 44207, + "chrome": 24843, + "chrome": 9529, + "chromo": 35809, + "chron": 5577, + "chron": 39781, + "chronic": 10115, + "chronic": 13677, + "chronicle": 20034, + "chronicles": 18905, + "chrono": 29387, + "chronograph": 38397, + "chry": 13508, + "chrysler": 20078, + "chs": 40277, + "chs": 8391, + "chsnews": 44919, + "cht": 11384, + "chter": 47811, + "chu": 3799, + "chu": 13622, + "chubby": 29109, + "chuck": 13211, + "chuck": 9894, + "chuckle": 35733, + "chucky": 42026, + "chuffed": 27233, + "chuk": 25878, + "chuk": 27221, + "chul": 33001, + "chum": 46869, + "chum": 41767, + "chun": 14693, + "chun": 25391, + "chung": 28418, + "chunk": 30275, + "chunks": 45538, + "chunky": 27978, + "chups": 46331, + "chur": 2309, + "church": 14956, + "church": 2735, + "churches": 15539, + "churchill": 17527, + "chus": 36246, + "chut": 28788, + "chutney": 36261, + "chy": 15131, + "chy": 8096, + "chyna": 43398, + "châ": 48669, + "ci": 698, + "ci": 5798, + "cia": 4019, + "cial": 1143, + "cian": 32323, + "ciao": 37677, + "ciara": 31369, + "cible": 28873, + "cic": 14539, + "cic": 21517, + "cid": 27359, + "cide": 34178, + "cider": 13547, + "cides": 41326, + "cie": 19730, + "cier": 24067, + "cies": 6785, + "cif": 35698, + "cigar": 26031, + "cigar": 16525, + "cigare": 13044, + "cigarette": 18548, + "cigarettes": 22750, + "cigars": 20750, + "cii": 42408, + "cil": 9217, + "cil": 2998, + "cilan": 33998, + "cilantro": 34568, + "cili": 18977, + "ciliation": 25294, + "cim": 30021, + "cin": 2396, + "cin": 25367, + "cina": 39467, + "cincin": 13291, + "cincinnati": 14197, + "cinco": 25131, + "cincode": 40930, + "cincodemayo": 42542, + "cincy": 30015, + "cincy": 30286, + "cinde": 20660, + "cinderella": 21515, + "cindy": 34439, + "cindy": 18532, + "cine": 4015, + "cine": 27451, + "cinema": 38251, + "cinema": 
6443, + "cinemas": 14845, + "cinematic": 25602, + "cinemato": 21919, + "cinematographer": 39059, + "cinematography": 33802, + "ciner": 39882, + "cing": 4014, + "cini": 25699, + "cinnam": 12768, + "cinnamon": 13460, + "cino": 18616, + "cio": 44584, + "cio": 9954, + "cion": 22024, + "ciones": 37155, + "cious": 38466, + "cip": 32884, + "cir": 2459, + "cir": 41135, + "circa": 10411, + "circle": 33574, + "circle": 7117, + "circles": 19411, + "circling": 46036, + "circu": 5143, + "circuit": 35583, + "circuit": 9801, + "circuits": 33260, + "circul": 16618, + "circular": 19733, + "circulare": 39525, + "circulareconomy": 39878, + "circulated": 46258, + "circulating": 42980, + "circulation": 27880, + "circum": 13406, + "circumstances": 18786, + "circus": 11833, + "cirque": 36049, + "cis": 9459, + "cis": 23513, + "cisco": 36689, + "cisco": 19290, + "cise": 19657, + "cisely": 33434, + "cision": 41957, + "cism": 24166, + "cist": 40906, + "cit": 4420, + "cit": 31294, + "citadel": 38036, + "citation": 33581, + "cite": 32641, + "cited": 25069, + "cites": 34490, + "citi": 4280, + "citi": 30270, + "cities": 5441, + "citing": 29088, + "citiz": 5816, + "citizen": 11720, + "citizen": 9814, + "citizens": 7949, + "citizenship": 17386, + "cito": 42636, + "citro": 27941, + "citroen": 35805, + "citrus": 17379, + "city": 5002, + "city": 1305, + "cityfc": 28751, + "cityo": 25709, + "cityof": 11595, + "cityscape": 40808, + "ciu": 39693, + "cius": 42559, + "civ": 40039, + "civic": 32240, + "civic": 11888, + "civil": 6923, + "civil": 6450, + "civilian": 21187, + "civilians": 18076, + "civilization": 22503, + "civilwar": 34524, + "ción": 44700, + "cj": 15238, + "cj": 15205, + "ck": 916, + "ck": 868, + "cke": 25224, + "cke": 40989, + "cked": 3441, + "cken": 25566, + "cker": 15509, + "cker": 4744, + "ckers": 37073, + "cket": 5525, + "ckett": 33899, + "ckey": 15029, + "ckey": 3657, + "cki": 36916, + "cki": 41055, + "cking": 4805, + "cko": 28818, + "cks": 2031, + "cky": 26229, + "cky": 3083, + "cl": 969, + "cl": 6482, + "cla": 940, + "cla": 20636, + "clad": 31606, + "cladding": 46411, + "clai": 29459, + "claim": 4290, + "claim": 6607, + "claimed": 9010, + "claiming": 15286, + "claims": 6852, + "clair": 31441, + "clair": 14039, + "claire": 20410, + "claire": 10460, + "clam": 13588, + "clam": 32598, + "clamation": 21793, + "clamp": 41501, + "clams": 38849, + "clan": 29252, + "clan": 14114, + "clancy": 37227, + "clans": 38279, + "clap": 30037, + "clap": 25546, + "clapham": 43619, + "clapton": 37683, + "clar": 3617, + "clara": 19468, + "clare": 18948, + "clare": 15927, + "claremont": 47789, + "clarence": 29320, + "clari": 15175, + "clarify": 37004, + "clarinet": 41178, + "clarity": 21323, + "clark": 13340, + "clark": 7521, + "clarke": 11548, + "clarkson": 25706, + "clas": 32003, + "clash": 38367, + "clash": 9359, + "clashes": 25193, + "clasico": 43567, + "class": 2876, + "class": 1874, + "classes": 6919, + "classi": 2507, + "classic": 9353, + "classic": 2713, + "classical": 22179, + "classical": 11355, + "classicalmusic": 27806, + "classiccar": 46906, + "classiccars": 21064, + "classics": 10634, + "classification": 26612, + "classified": 22056, + "classmate": 37090, + "classmates": 30062, + "classof": 25345, + "classroom": 9001, + "classrooms": 25768, + "classy": 11615, + "clau": 7526, + "claude": 17461, + "claudi": 39439, + "claudia": 21893, + "claudio": 31230, + "claus": 23317, + "clause": 26151, + "clave": 24111, + "claw": 49230, + "claw": 19106, + "claws": 29161, + "clay": 10402, + "clay": 8823, + "clays": 26128, + 
"clayton": 46445, + "clayton": 19413, + "clc": 31380, + "cle": 1321, + "cle": 2537, + "clean": 3572, + "clean": 3772, + "cleaned": 17468, + "cleanenergy": 43538, + "cleaner": 15619, + "cleaners": 33258, + "cleaning": 7210, + "cleanliness": 47886, + "cleans": 40827, + "cleanse": 28717, + "cleanser": 44170, + "cleansing": 25931, + "cleanup": 22353, + "clear": 4631, + "clear": 3143, + "clearance": 17959, + "cleared": 14880, + "clearer": 37031, + "clearing": 15481, + "clearly": 7767, + "clears": 29092, + "clearwater": 32124, + "cleary": 44342, + "cleats": 33486, + "cleavage": 44165, + "cled": 12827, + "clegg": 42915, + "clemens": 45896, + "clement": 22592, + "clement": 24714, + "clemente": 42461, + "clementine": 47112, + "clements": 49175, + "clemson": 38170, + "clemson": 19537, + "clen": 35547, + "cleo": 40344, + "cleop": 36287, + "cleopatra": 41212, + "cler": 11828, + "clergy": 42635, + "cleric": 43748, + "clerk": 22230, + "clermont": 47529, + "cles": 8077, + "cleve": 37599, + "clevel": 7701, + "cleveland": 30716, + "cleveland": 8430, + "clever": 30977, + "clever": 13385, + "clg": 47546, + "cli": 1503, + "clich": 44407, + "click": 16676, + "click": 3585, + "clicked": 29015, + "clicking": 26542, + "clicks": 31250, + "client": 48528, + "client": 7467, + "clients": 8114, + "clif": 13182, + "cliff": 23827, + "cliff": 10625, + "cliffe": 15170, + "clifford": 24226, + "cliffs": 20953, + "clifton": 23878, + "climat": 37283, + "climate": 7854, + "climate": 4589, + "climateaction": 31622, + "climatechange": 11055, + "climates": 46022, + "climax": 37033, + "climb": 7421, + "climb": 10649, + "climbed": 22528, + "climber": 36910, + "climbers": 47648, + "climbing": 9877, + "climbs": 29098, + "clin": 2879, + "clinch": 30404, + "clinched": 44064, + "cline": 37460, + "cling": 37068, + "cling": 4760, + "clinic": 7926, + "clinical": 35133, + "clinical": 9148, + "clinicians": 45866, + "clinics": 23330, + "clint": 37542, + "clint": 21160, + "clinton": 34403, + "clinton": 5820, + "clio": 46889, + "clip": 39712, + "clip": 9289, + "clipped": 45524, + "clipper": 42245, + "clippers": 23319, + "clipping": 47484, + "clips": 16594, + "clique": 34983, + "clive": 36086, + "clive": 21509, + "cll": 46091, + "cllr": 45743, + "cllr": 23034, + "clo": 1194, + "cloak": 36528, + "clock": 19878, + "clock": 6716, + "clocked": 49049, + "clocks": 25895, + "clockwise": 46150, + "clockwork": 42297, + "clon": 24477, + "clone": 22854, + "clones": 48047, + "clooney": 33161, + "clos": 48821, + "close": 10603, + "close": 2660, + "closed": 4552, + "closely": 13478, + "closer": 6377, + "closes": 11354, + "closest": 14975, + "closet": 14221, + "closeup": 35439, + "closing": 7101, + "closure": 13249, + "closures": 22923, + "cloth": 14559, + "clothes": 7080, + "clothing": 7425, + "clou": 4069, + "cloud": 12965, + "cloud": 3887, + "cloudcomputing": 41390, + "clouds": 6244, + "cloudy": 13106, + "clough": 42909, + "clover": 39574, + "clover": 22812, + "clow": 18386, + "clown": 15329, + "clowns": 30820, + "cls": 44251, + "clt": 29651, + "clt": 24236, + "clu": 996, + "club": 9642, + "club": 1736, + "clubbing": 48128, + "clubhouse": 26553, + "clubs": 9437, + "clue": 14994, + "clueless": 35350, + "clues": 23764, + "clusive": 41362, + "cluster": 15595, + "clusters": 33217, + "clut": 28507, + "clutch": 13953, + "clutter": 40804, + "cly": 12037, + "clyde": 39557, + "clyde": 18469, + "cm": 10190, + "cm": 3741, + "cma": 30554, + "cma": 31388, + "cmc": 45839, + "cmdr": 48250, + "cme": 34946, + "cmo": 24589, + "cmon": 42904, + "cmp": 46355, + "cms": 22520, + 
"cmt": 42727, + "cmu": 43046, + "cn": 3886, + "cn": 16200, + "cna": 48287, + "cnbc": 41242, + "cnbc": 24371, + "cnblue": 36018, + "cnc": 20571, + "cnet": 47487, + "cnews": 24319, + "cng": 41496, + "cnn": 22405, + "cnn": 8259, + "cns": 46095, + "cny": 31614, + "co": 622, + "co": 1320, + "coa": 29167, + "coach": 3275, + "coach": 2312, + "coached": 30228, + "coachella": 20222, + "coaches": 6924, + "coaching": 7766, + "coal": 10227, + "coal": 7919, + "coalition": 12920, + "coast": 6398, + "coast": 3720, + "coastal": 38246, + "coastal": 10852, + "coaster": 15944, + "coasters": 31548, + "coastguard": 40601, + "coastline": 27959, + "coasts": 42225, + "coat": 28869, + "coat": 7356, + "coated": 23401, + "coates": 36899, + "coating": 25369, + "coatings": 48706, + "coats": 18075, + "cob": 20140, + "cob": 32863, + "cobain": 36866, + "cobalt": 30896, + "cobb": 22719, + "cobble": 47894, + "cobra": 21574, + "coc": 23036, + "coc": 39498, + "coca": 21197, + "cocac": 26393, + "cocacola": 31248, + "cocaine": 20534, + "coch": 18599, + "cochran": 48798, + "cochrane": 41752, + "coco": 11850, + "coco": 13316, + "cocoa": 18074, + "cocon": 8597, + "coconut": 9581, + "cod": 16132, + "cod": 11915, + "code": 11582, + "code": 3217, + "coded": 33703, + "coden": 43914, + "coder": 41561, + "codes": 14566, + "codi": 39711, + "coding": 12647, + "cody": 23222, + "cody": 12666, + "coe": 15386, + "coed": 41028, + "coel": 45633, + "coer": 41198, + "coeur": 44986, + "coffe": 2255, + "coffee": 12898, + "coffee": 2453, + "coffees": 41184, + "coffey": 48066, + "cofficial": 18757, + "coffin": 29907, + "cog": 26362, + "cog": 35960, + "cogn": 12210, + "cognac": 44361, + "cognition": 46825, + "cognitive": 16584, + "cohe": 20669, + "cohen": 13381, + "coherent": 48450, + "cohort": 22782, + "coil": 25307, + "coim": 41528, + "coin": 14651, + "coin": 4170, + "coinci": 14015, + "coincidence": 19807, + "coins": 10530, + "coke": 39602, + "coke": 14035, + "col": 754, + "col": 9371, + "cola": 15444, + "colbert": 31647, + "colby": 32068, + "colchester": 31715, + "cold": 11146, + "cold": 3153, + "colder": 23859, + "coldest": 31438, + "coldplay": 27770, + "cole": 9305, + "cole": 8166, + "coleman": 15774, + "coles": 40265, + "coles": 30398, + "coli": 18877, + "coli": 15910, + "colin": 20989, + "colin": 10238, + "coliseum": 21836, + "coll": 25982, + "coll": 23898, + "colla": 2929, + "collab": 14013, + "collabor": 4437, + "collaborate": 21271, + "collaborated": 42265, + "collaborating": 25545, + "collaboration": 6642, + "collaborations": 36520, + "collaborative": 15841, + "collaborator": 48186, + "collaborators": 45901, + "collage": 11258, + "collagen": 36120, + "collap": 16881, + "collapse": 16520, + "collapsed": 25037, + "collapses": 43601, + "collar": 39662, + "collar": 13497, + "collateral": 44512, + "colle": 1801, + "colleague": 13067, + "colleagues": 8203, + "collec": 1733, + "collect": 10186, + "collected": 11980, + "collecti": 18530, + "collectible": 25680, + "collectibles": 21519, + "collecting": 10325, + "collection": 2548, + "collections": 12760, + "collective": 10162, + "collectively": 40687, + "collector": 13522, + "collectors": 20540, + "collects": 31576, + "colleen": 31020, + "college": 13512, + "college": 2229, + "colleges": 17357, + "collegi": 16311, + "collegiate": 18068, + "colli": 8262, + "collide": 27214, + "collie": 30611, + "collier": 35748, + "collin": 24056, + "collin": 32116, + "colling": 32319, + "collingwood": 45873, + "collins": 8684, + "collision": 15407, + "collo": 25115, + "colloqui": 37243, + "colloquium": 46514, + 
"collu": 25658, + "collusion": 33864, + "colo": 7300, + "colo": 27288, + "cologne": 22216, + "cology": 19187, + "colom": 8987, + "colombia": 12901, + "colombian": 28701, + "colombo": 33207, + "colon": 8280, + "colon": 29050, + "colonel": 22674, + "coloni": 22667, + "colonial": 16530, + "colonialism": 43385, + "colonies": 38738, + "colony": 18767, + "color": 4036, + "color": 3140, + "colorado": 34580, + "colorado": 6742, + "colorec": 41171, + "colored": 11775, + "colorful": 11444, + "colori": 28764, + "coloring": 17696, + "colorized": 46730, + "colors": 5389, + "colorstv": 28195, + "colorway": 44576, + "colossal": 40258, + "colosse": 48142, + "colossus": 34022, + "colour": 10240, + "colour": 4769, + "coloured": 17111, + "colourful": 15562, + "colouring": 31803, + "colours": 7626, + "cols": 35726, + "colt": 19726, + "colton": 32249, + "coltrane": 42333, + "colts": 16135, + "colum": 4164, + "columb": 31043, + "columbi": 25947, + "columbia": 9410, + "columbus": 11273, + "column": 10593, + "columnist": 28958, + "columns": 29056, + "com": 610, + "com": 2464, + "coma": 19620, + "comb": 3587, + "comb": 16380, + "combat": 35083, + "combat": 9275, + "combating": 46121, + "combe": 14363, + "combin": 25112, + "combination": 11312, + "combinations": 34950, + "combine": 12919, + "combined": 10427, + "combines": 22991, + "combining": 23561, + "combo": 10155, + "combos": 48117, + "combs": 30694, + "combu": 35629, + "combustion": 44654, + "comcast": 30043, + "come": 4225, + "come": 891, + "comeback": 8234, + "comedian": 13848, + "comedians": 33758, + "comedic": 43360, + "comedy": 19346, + "comedy": 4749, + "comer": 42997, + "comer": 20916, + "comers": 34436, + "comes": 2091, + "comet": 21405, + "comets": 40636, + "comey": 22957, + "comfor": 6563, + "comfort": 44000, + "comfort": 7808, + "comfortable": 8652, + "comfortably": 30392, + "comforting": 33835, + "comforts": 42243, + "comfy": 15736, + "comi": 40781, + "comic": 7729, + "comic": 4962, + "comicart": 46018, + "comicbook": 46564, + "comicbooks": 22018, + "comiccon": 18379, + "comicon": 43820, + "comics": 4256, + "comin": 18164, + "coming": 14916, + "coming": 1171, + "comingsoon": 19894, + "comm": 965, + "comm": 11413, + "comman": 39780, + "command": 18391, + "command": 11350, + "commander": 11265, + "commanders": 41667, + "commanding": 36933, + "commandments": 43409, + "commando": 31361, + "commands": 38163, + "comme": 29692, + "commemor": 9495, + "commemorate": 21242, + "commemorates": 45149, + "commemorating": 28734, + "commemoration": 29288, + "commemorative": 24623, + "commen": 15795, + "commence": 25059, + "commenced": 43908, + "commencement": 21666, + "commences": 48551, + "commend": 37555, + "commended": 40702, + "comment": 20035, + "comment": 5761, + "commentary": 14146, + "commentator": 32016, + "commented": 28328, + "commenting": 37292, + "comments": 6606, + "commer": 4028, + "commerce": 8333, + "commerci": 15601, + "commercial": 31802, + "commercial": 6287, + "commercials": 30724, + "commish": 45399, + "commissi": 6000, + "commission": 5292, + "commissioned": 16565, + "commissioner": 10221, + "commissioners": 30702, + "commissioning": 29585, + "commissions": 20668, + "commit": 3041, + "commit": 11797, + "commitment": 7770, + "commitments": 32136, + "commits": 20241, + "committed": 7907, + "committee": 5636, + "committees": 40504, + "committing": 21937, + "commod": 9496, + "commodities": 30350, + "commodity": 29041, + "commodore": 31129, + "common": 8414, + "common": 4176, + "commonly": 20344, + "commons": 16653, + "commonwealth": 16569, + 
"comms": 18832, + "commu": 9561, + "commun": 1515, + "communal": 32809, + "communi": 16164, + "communic": 4784, + "communicate": 19809, + "communication": 7999, + "communications": 10052, + "communion": 28579, + "communism": 35387, + "communist": 18602, + "communities": 6361, + "community": 14784, + "community": 1927, + "commute": 15898, + "commuter": 27782, + "commuters": 30823, + "commuting": 43503, + "como": 16236, + "comp": 2561, + "comp": 11679, + "compac": 40014, + "compact": 13690, + "compan": 1995, + "companies": 5361, + "companion": 14963, + "companions": 37124, + "company": 2634, + "compar": 7580, + "comparable": 27092, + "comparative": 33388, + "compare": 13771, + "compared": 10544, + "compares": 25104, + "comparing": 20564, + "comparison": 14186, + "comparisons": 40870, + "compart": 30072, + "compartment": 40383, + "compass": 19438, + "compassion": 14463, + "compassionate": 30193, + "compati": 17295, + "compatibility": 41614, + "compatible": 21286, + "compe": 5254, + "compelled": 49375, + "compelling": 21766, + "compen": 42079, + "compens": 15172, + "compensation": 18663, + "compet": 2932, + "compete": 10038, + "competed": 27767, + "competen": 31853, + "competence": 31165, + "competency": 49293, + "competent": 28113, + "competes": 39826, + "competing": 13068, + "competit": 15892, + "competiti": 32581, + "competition": 3742, + "competitions": 23259, + "competitive": 10687, + "competitiveness": 43209, + "competitor": 26633, + "competitors": 23638, + "compilation": 20446, + "compiled": 34579, + "compla": 7428, + "complain": 19292, + "complained": 42029, + "complaining": 20812, + "complains": 46363, + "complaint": 20391, + "complaints": 20020, + "comple": 1730, + "complement": 36624, + "complementary": 48953, + "complete": 3263, + "completed": 5976, + "completely": 5989, + "completes": 19321, + "completing": 14949, + "completion": 15915, + "complex": 16099, + "complex": 6324, + "complexes": 47870, + "complexion": 47732, + "complexity": 24815, + "compli": 5270, + "compliance": 14658, + "compliant": 29893, + "complic": 11460, + "complicated": 16621, + "complications": 29936, + "compliment": 25116, + "complimentary": 20948, + "compliments": 25477, + "comply": 36281, + "component": 21284, + "components": 16816, + "compos": 7783, + "compose": 43659, + "composed": 19916, + "composer": 12104, + "composers": 33314, + "composing": 40412, + "composite": 21606, + "composites": 45395, + "composition": 17510, + "compositions": 44652, + "compost": 46002, + "compost": 33307, + "compound": 19980, + "compounds": 33991, + "compre": 8483, + "compreh": 42976, + "comprehen": 12050, + "comprehend": 48230, + "comprehensive": 13854, + "compress": 33353, + "compressed": 42359, + "compression": 25638, + "compressor": 39607, + "compri": 29445, + "compromise": 26611, + "compromised": 38576, + "compromising": 45436, + "comps": 48665, + "compton": 28364, + "compu": 11639, + "compul": 25869, + "compulsory": 39345, + "computing": 12732, + "comra": 25553, + "comrade": 30844, + "comrades": 29282, + "coms": 30493, + "con": 616, + "con": 2457, + "cona": 30605, + "conan": 24750, + "conce": 9145, + "concealed": 35419, + "conceded": 37895, + "conceived": 39725, + "concentr": 11085, + "concentrate": 30846, + "concentrated": 36776, + "concentration": 18565, + "concep": 8389, + "concepcion": 47035, + "concept": 6353, + "conceptart": 31162, + "conception": 30510, + "conceptions": 40307, + "concepts": 16763, + "conceptu": 42745, + "conceptual": 34070, + "concer": 2228, + "concern": 12928, + "concerned": 12020, + 
"concerning": 21772, + "concerns": 11134, + "concert": 32180, + "concert": 3066, + "concerto": 24710, + "concerts": 14418, + "concession": 38117, + "concessions": 43981, + "concier": 28859, + "concierge": 39850, + "conclave": 38098, + "conclu": 9627, + "conclude": 37525, + "concluded": 27825, + "concludes": 30634, + "conclusion": 20932, + "conclusions": 39507, + "conco": 43034, + "concor": 19913, + "concord": 26448, + "concordia": 35492, + "concours": 36282, + "concourse": 37793, + "concre": 43658, + "concrete": 9637, + "concussion": 28321, + "condem": 13287, + "condemn": 27212, + "condemned": 35145, + "condemns": 32092, + "conden": 24816, + "conditi": 11170, + "condition": 36978, + "condition": 7336, + "conditional": 24671, + "conditioned": 37014, + "conditioner": 31239, + "conditioning": 18181, + "conditions": 5892, + "condo": 19952, + "condol": 18661, + "condolences": 20836, + "condom": 39021, + "condomin": 42589, + "condoms": 37878, + "condor": 47643, + "condos": 42342, + "condu": 40772, + "conduc": 5379, + "conduct": 11647, + "conducted": 13080, + "conducting": 16787, + "conductor": 22317, + "conducts": 32084, + "cone": 39279, + "cone": 10266, + "cones": 26718, + "coney": 41837, + "conf": 6477, + "confe": 1968, + "confeder": 17104, + "confederate": 24864, + "confederation": 43484, + "conferen": 37961, + "conference": 2230, + "conferences": 22811, + "conferencing": 47320, + "confess": 38860, + "confession": 22572, + "confessions": 29404, + "confetti": 37923, + "confi": 5005, + "confidence": 8510, + "confident": 12365, + "confidential": 28712, + "configu": 46746, + "configur": 26950, + "configuration": 33378, + "confin": 45316, + "confined": 40973, + "confir": 3930, + "confirm": 12130, + "confirmation": 19645, + "confirmed": 6346, + "confirming": 38433, + "confirms": 11803, + "confis": 36285, + "confit": 42241, + "confl": 8173, + "conflic": 19029, + "conflict": 10397, + "conflicting": 43894, + "conflicts": 28713, + "confor": 40933, + "confron": 20033, + "confront": 38382, + "confrontation": 41478, + "confu": 6890, + "confuse": 37503, + "confused": 10946, + "confusing": 24683, + "confusion": 20493, + "cong": 24407, + "conge": 20013, + "congestion": 24432, + "congo": 20334, + "congr": 1227, + "congrats": 1887, + "congratul": 1750, + "congratulate": 16633, + "congratulated": 42004, + "congratulates": 24580, + "congratulating": 30967, + "congratulation": 24751, + "congratulations": 1864, + "congre": 7947, + "congreg": 40727, + "congregation": 32618, + "congress": 12452, + "congress": 4599, + "congressional": 15239, + "congressman": 17145, + "congresswoman": 37317, + "coni": 39031, + "coni": 36651, + "conj": 41543, + "conju": 33821, + "conjunction": 34226, + "conley": 44536, + "conline": 37593, + "conn": 41836, + "conn": 20329, + "conne": 8437, + "connec": 29933, + "connect": 19969, + "connected": 27506, + "connecting": 41429, + "connection": 26840, + "connections": 37161, + "connie": 25739, + "connoisse": 46012, + "connol": 27739, + "connolly": 29537, + "connor": 21984, + "connor": 10218, + "conom": 2664, + "conomy": 22529, + "conor": 29955, + "conor": 19478, + "conqu": 13382, + "conquer": 38585, + "conquer": 19821, + "conquered": 27099, + "conquering": 43778, + "conquest": 35367, + "conrad": 22073, + "cons": 10311, + "consci": 9427, + "conscience": 27310, + "conscious": 14914, + "consciously": 46755, + "consciousness": 17894, + "conse": 34887, + "consecu": 12084, + "consecutive": 12413, + "consen": 23110, + "consensus": 25071, + "consent": 21922, + "consequ": 13003, + "consequence": 42262, 
+ "consequences": 15682, + "conserv": 4649, + "conservancy": 46729, + "conservation": 37616, + "conservation": 8322, + "conservative": 11421, + "conservatives": 17631, + "conservatory": 32140, + "conserve": 34231, + "consi": 2899, + "consider": 12471, + "consider": 6734, + "considerable": 38256, + "considerably": 38510, + "consideration": 24310, + "considerations": 33700, + "considered": 9487, + "considering": 10761, + "considers": 24691, + "consist": 10410, + "consist": 33735, + "consisted": 49354, + "consistency": 25683, + "consistent": 16439, + "consistently": 23799, + "consisting": 39241, + "consists": 23458, + "consol": 27869, + "consolation": 38888, + "console": 13403, + "consoles": 33136, + "consoli": 21586, + "consolidation": 41111, + "consor": 27108, + "consortium": 29988, + "conspir": 12680, + "conspiracy": 15236, + "const": 3826, + "constable": 29179, + "constan": 38718, + "constance": 40682, + "constant": 32000, + "constant": 13111, + "constantine": 30640, + "constantly": 14336, + "constell": 21913, + "constellation": 25991, + "constitu": 6299, + "constituency": 22464, + "constituents": 32075, + "constitution": 12157, + "constitutional": 16091, + "constra": 28973, + "constraints": 41910, + "constru": 3983, + "construc": 13321, + "construct": 24467, + "constructed": 16876, + "constructing": 33653, + "construction": 48873, + "construction": 4585, + "constructive": 31810, + "consu": 4689, + "consul": 5295, + "consul": 33630, + "consulate": 34341, + "consult": 9438, + "consult": 26727, + "consultancy": 31735, + "consultant": 14196, + "consultants": 27203, + "consultation": 15777, + "consultations": 43424, + "consulting": 15883, + "consume": 28919, + "consumed": 29653, + "consumer": 34408, + "consumer": 10422, + "consumers": 14014, + "consuming": 30607, + "consumption": 14904, + "cont": 2036, + "cont": 21425, + "contact": 39367, + "contact": 3523, + "contacted": 37331, + "contacts": 22789, + "contag": 29259, + "contagious": 33984, + "contain": 9948, + "contain": 15187, + "contained": 23836, + "container": 14913, + "containers": 20448, + "containing": 20281, + "contains": 12844, + "contamin": 24662, + "contaminated": 35773, + "contamination": 31770, + "conte": 15402, + "conte": 26882, + "contempl": 21924, + "contemplating": 33854, + "contempor": 14538, + "contemporary": 16607, + "contemporary": 8859, + "contemporaryart": 20212, + "contempt": 39293, + "conten": 42201, + "contender": 23573, + "contenders": 29711, + "content": 15526, + "content": 4750, + "contentmarketing": 20429, + "contents": 14850, + "contest": 23103, + "contest": 4576, + "contestalert": 27313, + "contestant": 25682, + "contestants": 28062, + "contested": 37845, + "contests": 32210, + "contex": 42015, + "context": 13089, + "conti": 46431, + "conti": 40842, + "contin": 1918, + "continent": 19623, + "continental": 14089, + "continents": 38642, + "conting": 27104, + "contingent": 36467, + "continu": 4688, + "continually": 34086, + "continuation": 38964, + "continue": 3942, + "continued": 10150, + "continues": 4305, + "continuing": 11009, + "continuity": 34035, + "continuous": 17033, + "continuously": 29634, + "continuum": 44978, + "contour": 34733, + "contr": 22871, + "contra": 9880, + "contra": 38620, + "contrac": 7581, + "contracep": 35109, + "contract": 6120, + "contracting": 39091, + "contractor": 21429, + "contractors": 22427, + "contracts": 16563, + "contradic": 27957, + "contrary": 32805, + "contrast": 18501, + "contrasting": 40758, + "contribu": 4753, + "contribute": 14112, + "contributed": 19397, + 
"contributes": 34203, + "contributing": 21762, + "contribution": 11116, + "contributions": 14465, + "contributor": 24553, + "contributors": 32908, + "contro": 2372, + "control": 9963, + "control": 3366, + "controlled": 14140, + "controller": 12929, + "controllers": 30374, + "controlling": 26427, + "controls": 15746, + "controversi": 13674, + "controversial": 14617, + "controversy": 18659, + "conv": 48382, + "conve": 18421, + "conven": 7283, + "conveni": 33278, + "convenience": 17859, + "convenient": 18978, + "conveniently": 40844, + "convention": 6752, + "conventional": 20835, + "conventions": 41404, + "conver": 6336, + "convergence": 35381, + "convers": 4577, + "conversation": 5690, + "conversations": 12326, + "converse": 24149, + "conversion": 15111, + "conversions": 44137, + "convert": 20074, + "converted": 20808, + "converter": 34611, + "convertible": 19608, + "converting": 34674, + "converts": 42470, + "convey": 38342, + "convic": 11150, + "convicted": 18668, + "conviction": 24967, + "convictions": 44366, + "convin": 12889, + "convince": 20351, + "convinced": 17388, + "convincing": 27742, + "convo": 19372, + "convocation": 30674, + "convos": 44842, + "convoy": 30292, + "conway": 21410, + "conwy": 48971, + "cony": 14501, + "coo": 1664, + "coo": 21691, + "coogs": 47624, + "cook": 9726, + "cook": 5977, + "cookbook": 21086, + "cooke": 29979, + "cooked": 11452, + "cooker": 23806, + "cookery": 38779, + "cookie": 9367, + "cookies": 8320, + "cookin": 46610, + "cooking": 39248, + "cooking": 6283, + "cookout": 39743, + "cooks": 24256, + "cool": 5594, + "cool": 2077, + "cooled": 37170, + "cooler": 11078, + "coolest": 10566, + "cooling": 15291, + "coom": 41726, + "coon": 34260, + "coon": 16958, + "coop": 39917, + "coop": 18910, + "cooper": 7264, + "cooper": 8133, + "cooperate": 42936, + "cooperation": 11785, + "cooperative": 24517, + "coops": 48531, + "coordin": 8187, + "coordinate": 38250, + "coordinated": 32540, + "coordinating": 40075, + "coordination": 25611, + "coordinator": 13967, + "coors": 36025, + "cop": 3196, + "cop": 7070, + "copa": 22749, + "copd": 45876, + "cope": 47635, + "cope": 12564, + "copeland": 37604, + "copen": 15637, + "copenhagen": 17390, + "coper": 41891, + "copernic": 45519, + "copied": 36770, + "copies": 9851, + "coping": 30545, + "copolitics": 45846, + "copp": 20937, + "copped": 42229, + "copper": 24741, + "copper": 10333, + "coppola": 47427, + "cops": 10719, + "copter": 28049, + "copy": 11376, + "copy": 4509, + "copying": 38925, + "copyright": 15778, + "cor": 851, + "cor": 18559, + "cora": 34953, + "coral": 31220, + "coral": 12054, + "corbett": 35699, + "corbin": 35578, + "corbyn": 14026, + "cord": 40893, + "cord": 11181, + "corden": 41999, + "cordi": 41681, + "cordless": 44412, + "cords": 22164, + "core": 19622, + "core": 5000, + "cores": 37874, + "corey": 31279, + "corey": 15288, + "corgi": 31320, + "cori": 26508, + "coriander": 37491, + "corin": 17716, + "corinthians": 34471, + "cork": 18148, + "cork": 10376, + "corn": 5202, + "corn": 5894, + "cornelius": 45865, + "cornell": 38689, + "cornell": 20859, + "corner": 18509, + "corner": 5253, + "corners": 19584, + "cornerstone": 36280, + "cornish": 23774, + "cornwall": 37903, + "cornwall": 10777, + "coron": 13210, + "corona": 25564, + "coronado": 43946, + "coronary": 45955, + "coronation": 25014, + "coroner": 47241, + "corp": 29203, + "corp": 10918, + "corpor": 4258, + "corporal": 42445, + "corporate": 33877, + "corporate": 6838, + "corporation": 11282, + "corporations": 25482, + "corps": 11330, + "corpse": 29408, + 
"corpus": 31672, + "correc": 5011, + "correct": 8340, + "corrected": 35628, + "correction": 20843, + "correctional": 38030, + "corrections": 37507, + "correctly": 15359, + "correlation": 29218, + "correspon": 20203, + "correspondent": 29996, + "corri": 12974, + "corridor": 20592, + "corrie": 23961, + "corro": 24936, + "corro": 42033, + "corrosion": 39191, + "corru": 6501, + "corrup": 30429, + "corrupt": 15194, + "corruption": 9141, + "corsa": 47670, + "corsair": 42367, + "corset": 40408, + "cortex": 40109, + "cortez": 30461, + "corvette": 24367, + "cory": 23221, + "cory": 18329, + "cos": 5865, + "cos": 5700, + "cosby": 30324, + "cosc": 45944, + "coscino": 47909, + "cose": 26495, + "cosm": 37486, + "cosme": 9628, + "cosmetic": 23918, + "cosmetics": 12896, + "cosmic": 47398, + "cosmic": 18304, + "cosmo": 12829, + "cosmo": 32072, + "cosmopolitan": 35518, + "cosmos": 22151, + "cospla": 15149, + "cosplay": 42401, + "cosplay": 6435, + "cosplayer": 30215, + "cosplaying": 46701, + "cost": 11360, + "cost": 4713, + "costa": 10480, + "costar": 28659, + "costarica": 31272, + "costco": 31045, + "costello": 30667, + "costing": 39193, + "costly": 30170, + "costs": 7628, + "costu": 5786, + "costume": 7235, + "costumes": 15150, + "cosy": 22848, + "cot": 4718, + "cot": 5871, + "cote": 44234, + "cote": 20751, + "cotland": 32576, + "cotsw": 23303, + "cotswolds": 35546, + "cott": 8211, + "cott": 11349, + "cottage": 12155, + "cottages": 34405, + "cotton": 22218, + "cotton": 7050, + "cou": 1368, + "couch": 12724, + "cougar": 35028, + "cougar": 27042, + "cougars": 20425, + "cough": 35631, + "cough": 18498, + "cougs": 28482, + "coul": 22483, + "could": 44812, + "could": 1510, + "couldn": 4072, + "couldnt": 29042, + "coulter": 42291, + "coun": 939, + "counc": 12927, + "council": 18187, + "council": 3620, + "councill": 15732, + "councillor": 21179, + "councillors": 29695, + "councilman": 40833, + "councils": 29938, + "counsel": 13780, + "counsel": 19814, + "counseling": 25000, + "counsell": 47510, + "counselling": 40581, + "counselor": 26148, + "counselors": 38688, + "count": 6073, + "count": 5887, + "countdown": 39559, + "countdown": 7500, + "counted": 23149, + "counter": 10134, + "counter": 7352, + "counterfe": 33067, + "counterfeit": 44242, + "counterpart": 39216, + "counterparts": 42106, + "counters": 46170, + "countess": 46276, + "counties": 12338, + "counting": 9723, + "countless": 21819, + "countries": 5489, + "country": 7896, + "country": 2157, + "countryfile": 47023, + "countrymusic": 30372, + "countryside": 16303, + "counts": 12264, + "county": 18734, + "county": 2116, + "coup": 9871, + "coup": 16479, + "coupe": 16773, + "couple": 40136, + "couple": 3377, + "coupled": 37153, + "couples": 14752, + "coupling": 45595, + "coupon": 14019, + "coupons": 23945, + "cour": 1391, + "coura": 4436, + "courage": 9828, + "courageous": 25005, + "courier": 27217, + "cours": 21493, + "course": 43225, + "course": 2613, + "courses": 9464, + "court": 16837, + "court": 2908, + "courte": 5088, + "courtesy": 5228, + "courthouse": 22205, + "courtney": 33601, + "courtney": 15990, + "courtroom": 41071, + "courts": 13514, + "courty": 20121, + "courtyard": 21900, + "cous": 48397, + "cousin": 7780, + "cousins": 14073, + "cout": 29118, + "coutinho": 35530, + "couture": 14808, + "cov": 19384, + "cov": 48385, + "cove": 21700, + "cove": 14708, + "coven": 12483, + "covenant": 29647, + "coventry": 18007, + "cover": 13534, + "cover": 2202, + "coverage": 6810, + "covered": 5603, + "covering": 9462, + "covers": 7745, + "covert": 40134, + 
"coveted": 36119, + "covington": 43196, + "cow": 5076, + "cow": 9706, + "cowan": 42699, + "coward": 33729, + "cowards": 48972, + "cowboy": 25833, + "cowboy": 13657, + "cowboys": 11864, + "cowboysnation": 43082, + "cowell": 39015, + "cowgirl": 47090, + "coworker": 30727, + "coworkers": 30821, + "coworking": 36034, + "cows": 15204, + "cowx": 23831, + "cox": 25784, + "cox": 11597, + "coy": 12765, + "coy": 15742, + "coyi": 48407, + "coyle": 45348, + "coyne": 44729, + "coyo": 16614, + "coyote": 26586, + "coyotes": 30423, + "coys": 19736, + "coz": 39922, + "coz": 14282, + "cozy": 14873, + "cp": 7905, + "cp": 9130, + "cpa": 30095, + "cpac": 45731, + "cpc": 26125, + "cpd": 23402, + "cpec": 48007, + "cpfc": 27553, + "cpi": 41795, + "cpl": 26852, + "cpr": 25134, + "cps": 27078, + "cpt": 32892, + "cpu": 27700, + "cq": 48910, + "cq": 48417, + "cr": 1075, + "cr": 3483, + "cra": 1184, + "cra": 18362, + "crab": 27382, + "crab": 11574, + "crabs": 30908, + "crack": 11222, + "crack": 10334, + "crackdown": 29527, + "cracked": 19826, + "cracker": 16298, + "crackers": 26200, + "cracking": 13008, + "cracks": 21426, + "cracy": 24749, + "cradle": 29384, + "crae": 40438, + "craf": 10873, + "craft": 7717, + "craft": 3588, + "craftbeer": 12371, + "crafted": 12424, + "crafthour": 42324, + "crafting": 26886, + "crafts": 33276, + "crafts": 13383, + "craftsman": 39528, + "craftsmanship": 36682, + "crafty": 32317, + "craic": 46962, + "craig": 14042, + "craig": 8061, + "craigslist": 43865, + "cram": 29809, + "cramer": 44592, + "cramps": 46106, + "cran": 7761, + "cranberries": 49361, + "cranberry": 23824, + "crane": 14626, + "cranes": 26979, + "crani": 45674, + "crank": 46246, + "crank": 32283, + "cranston": 44340, + "crap": 11899, + "crappy": 30475, + "crash": 37150, + "crash": 5033, + "crashed": 16638, + "crashes": 17013, + "crashing": 24991, + "crat": 46696, + "crate": 24756, + "crater": 22663, + "crates": 30172, + "cratic": 32175, + "crative": 39999, + "crats": 43056, + "crave": 33397, + "craven": 33625, + "craving": 18344, + "cravings": 34476, + "craw": 7400, + "crawfish": 42772, + "crawford": 15918, + "crawl": 20106, + "crawler": 41012, + "crawley": 42316, + "crawling": 37066, + "cray": 24184, + "cray": 27032, + "crayon": 41801, + "crayons": 43508, + "craz": 25776, + "craze": 30637, + "craziest": 32690, + "craziness": 46436, + "crazy": 17540, + "crazy": 3578, + "crc": 25618, + "cre": 798, + "cre": 17762, + "cream": 23184, + "cream": 3867, + "creams": 41447, + "creamy": 17206, + "crease": 48441, + "create": 30949, + "create": 3380, + "created": 4080, + "creates": 10361, + "creati": 6714, + "creating": 5524, + "creation": 38293, + "creation": 6900, + "creations": 17411, + "creative": 15237, + "creative": 4450, + "creatives": 29352, + "creativity": 9636, + "creator": 10173, + "creators": 17981, + "creature": 14317, + "creatures": 13938, + "cred": 7314, + "cred": 22377, + "credenti": 29487, + "credentials": 33422, + "credi": 21097, + "credibility": 34984, + "credible": 32983, + "credit": 21467, + "credit": 3900, + "credited": 32480, + "credits": 10654, + "creds": 43462, + "cree": 33961, + "cree": 36014, + "creed": 18845, + "creek": 26120, + "creek": 5526, + "creep": 8153, + "creep": 26084, + "creeper": 38662, + "creeping": 29697, + "creeps": 45135, + "creepy": 11943, + "creighton": 42823, + "creme": 22681, + "creole": 45632, + "crepe": 38611, + "crescent": 18211, + "cress": 39124, + "crest": 35985, + "crest": 15760, + "crested": 36656, + "crete": 8584, + "crew": 21560, + "crew": 3462, + "crewe": 43284, + "crews": 10463, 
+ "cri": 1621, + "cri": 38962, + "crib": 23271, + "cric": 4328, + "cricke": 19098, + "cricket": 21859, + "cricket": 5373, + "cricketer": 28439, + "cricketers": 43986, + "cried": 15290, + "cries": 19769, + "crime": 13872, + "crime": 4896, + "crimea": 28614, + "crimes": 11827, + "crimin": 5874, + "criminal": 30197, + "criminal": 8255, + "criminals": 18783, + "crimson": 19437, + "cringe": 42588, + "cripp": 33588, + "cris": 37818, + "crises": 36403, + "crisis": 5712, + "crisp": 15145, + "crispr": 39784, + "crisps": 35744, + "crispy": 16458, + "criss": 29708, + "cristi": 12699, + "cristian": 48808, + "cristiano": 14807, + "cristina": 33395, + "cristo": 38315, + "crit": 3613, + "crit": 48130, + "criteri": 33627, + "criteria": 24849, + "criterion": 43841, + "criti": 25333, + "critic": 12417, + "critic": 19361, + "critical": 15314, + "critical": 6808, + "critically": 21570, + "criticalrole": 33606, + "criticalrole": 22742, + "criticalrolefanart": 43663, + "critici": 20333, + "criticism": 17405, + "criticize": 46081, + "criticized": 41557, + "critics": 16946, + "critique": 32982, + "critters": 35423, + "crm": 22610, + "cro": 1192, + "cro": 22522, + "croati": 28072, + "croatia": 13323, + "croatian": 34795, + "croc": 43350, + "croche": 35352, + "crochet": 17554, + "crock": 41685, + "crocker": 47843, + "crockett": 48313, + "crocod": 24519, + "crocodile": 24757, + "crocs": 38988, + "croft": 16657, + "croissant": 46011, + "croix": 44735, + "crom": 25082, + "crombie": 46162, + "cromwell": 45345, + "cron": 17361, + "croo": 16443, + "crook": 43744, + "crooked": 48473, + "crooked": 25644, + "crooks": 44226, + "crop": 40751, + "crop": 9955, + "cropped": 31139, + "crops": 16290, + "crore": 18274, + "crores": 37281, + "cros": 16670, + "crosby": 21095, + "cross": 5266, + "cross": 3417, + "crossed": 11731, + "crosses": 20473, + "crossfit": 47214, + "crossfit": 20395, + "crossing": 8673, + "crossings": 43517, + "crossover": 17194, + "crossroads": 27427, + "crossword": 32945, + "crou": 31206, + "crouch": 36506, + "crow": 3138, + "crow": 16019, + "crowd": 12036, + "crowd": 4570, + "crowded": 20182, + "crowdfunding": 17971, + "crowds": 16092, + "crowe": 33560, + "crowley": 32287, + "crown": 22190, + "crown": 6902, + "crowned": 16109, + "crowns": 33229, + "crows": 27134, + "croy": 21676, + "croydon": 27116, + "crs": 28449, + "crt": 43877, + "cru": 1815, + "cru": 29788, + "cruci": 18499, + "crucial": 12396, + "crude": 20677, + "cruel": 16073, + "cruel": 17573, + "cruelty": 20675, + "cruis": 27721, + "cruise": 36425, + "cruise": 6764, + "cruiser": 21394, + "cruises": 19214, + "cruising": 19743, + "crum": 43268, + "crumb": 48327, + "crumb": 39909, + "crumble": 36595, + "crumbs": 35893, + "crun": 17407, + "crunch": 16620, + "crunchy": 31366, + "crusad": 19133, + "crusade": 36846, + "crusader": 40171, + "crusaders": 31319, + "crush": 22296, + "crush": 7610, + "crushed": 18270, + "crusher": 44923, + "crushes": 35844, + "crushing": 20790, + "crust": 23136, + "crusted": 37314, + "cruz": 33689, + "cruz": 8403, + "cry": 2837, + "cry": 6290, + "crying": 6828, + "cryo": 32215, + "cryp": 4865, + "crypt": 37814, + "cryptic": 46925, + "crypto": 8080, + "crypto": 9608, + "cryptocurrencies": 33329, + "cryptocurrency": 12070, + "cryst": 15891, + "crystal": 17387, + "crystal": 6517, + "crystalli": 47551, + "crystals": 18350, + "cs": 11978, + "cs": 2804, + "csa": 26355, + "csc": 41727, + "csc": 37266, + "csd": 36913, + "cse": 41659, + "csg": 47085, + "csgo": 28928, + "csi": 41750, + "csi": 28070, + "csk": 43036, + "csm": 40061, + "csn": 
46329, + "cso": 43864, + "csp": 39243, + "csr": 32105, + "csr": 24598, + "csrracing": 44193, + "css": 41418, + "css": 19846, + "cst": 17016, + "csu": 35948, + "csu": 31261, + "csw": 41031, + "ct": 3381, + "ct": 1122, + "cta": 28397, + "ctar": 27842, + "ctc": 34123, + "cte": 31410, + "cted": 2910, + "ctf": 35250, + "cthulhu": 41064, + "cting": 7985, + "ction": 17578, + "ction": 1569, + "ctions": 7021, + "ctive": 9313, + "cto": 17445, + "ctor": 8108, + "ctr": 35602, + "ctr": 18481, + "cts": 6936, + "ctto": 25118, + "ctu": 20834, + "cture": 17668, + "ctv": 21213, + "ctv": 27590, + "cu": 729, + "cu": 11224, + "cuando": 40388, + "cub": 16938, + "cub": 19972, + "cuba": 11576, + "cuban": 15536, + "cube": 47753, + "cube": 11353, + "cubes": 31413, + "cubic": 48159, + "cubic": 29614, + "cubs": 9858, + "cuck": 26364, + "cuckoo": 38062, + "cucu": 16705, + "cucumber": 19787, + "cucumbers": 48065, + "cud": 42684, + "cudd": 12820, + "cuddle": 19568, + "cuddles": 24001, + "cuddling": 29696, + "cuddly": 36208, + "cudi": 48713, + "cue": 13424, + "cuer": 39506, + "cues": 35719, + "cuff": 34693, + "cuff": 22414, + "cufflinks": 43938, + "cuffs": 37221, + "cuis": 9938, + "cuisine": 10605, + "cuk": 34838, + "cul": 1877, + "cula": 35935, + "cular": 10940, + "culars": 45719, + "cule": 31066, + "cules": 18984, + "culin": 14772, + "culinary": 16466, + "cull": 21880, + "cull": 42061, + "cullen": 25973, + "culmin": 33778, + "culo": 36305, + "culprit": 41593, + "cult": 11965, + "cultiv": 16781, + "cultivate": 42983, + "cultivated": 48901, + "cultivation": 41539, + "cultur": 20780, + "cultural": 34908, + "cultural": 6753, + "culturally": 36783, + "culture": 20197, + "culture": 3673, + "cultured": 40176, + "cultures": 19552, + "culver": 42103, + "cum": 20142, + "cum": 27119, + "cumb": 10858, + "cumber": 15309, + "cumberbatch": 27541, + "cumberland": 28747, + "cumbri": 32010, + "cumbria": 17953, + "cumin": 42285, + "cumple": 47050, + "cumul": 42961, + "cumulative": 47610, + "cumulus": 46313, + "cun": 12423, + "cun": 29532, + "cunningham": 25321, + "cuomo": 25681, + "cup": 5059, + "cup": 1937, + "cupboard": 32074, + "cupcake": 17025, + "cupcakes": 12747, + "cupid": 34885, + "cuppa": 28077, + "cups": 11463, + "cur": 1092, + "cur": 33073, + "curated": 20341, + "curator": 20753, + "curb": 21931, + "curd": 38881, + "cure": 36758, + "cure": 9088, + "cured": 26248, + "cures": 38204, + "curfew": 48826, + "curi": 12640, + "curing": 44169, + "curiosity": 21583, + "curious": 9865, + "curl": 24306, + "curled": 43734, + "curling": 18543, + "curls": 24340, + "curly": 20795, + "curran": 40999, + "currant": 43501, + "curren": 6142, + "currencies": 23530, + "currency": 7853, + "current": 3653, + "currently": 3792, + "currents": 35450, + "curric": 16201, + "curriculum": 17947, + "currie": 39385, + "curry": 49285, + "curry": 8051, + "curse": 18479, + "cursed": 26408, + "cursor": 46546, + "curt": 38137, + "curtain": 17223, + "curtains": 30223, + "curti": 39925, + "curtis": 13808, + "curve": 15792, + "curved": 25789, + "curves": 22814, + "curvy": 45788, + "cus": 2736, + "cusa": 47414, + "cuse": 37950, + "cush": 43731, + "cushi": 15333, + "cushion": 20853, + "cushions": 34163, + "cussion": 16658, + "cussions": 46853, + "cust": 20900, + "custard": 26516, + "custo": 4376, + "custody": 16176, + "custom": 2662, + "custom": 4996, + "custome": 41323, + "customer": 24035, + "customer": 5102, + "customerexperience": 45167, + "customers": 5528, + "customerservice": 40611, + "customiz": 41793, + "customizable": 48253, + "customization": 48244, + 
"customize": 32179, + "customized": 23229, + "customs": 16880, + "cut": 10511, + "cut": 3032, + "cute": 16031, + "cute": 2242, + "cuteness": 19342, + "cuter": 27151, + "cutest": 8032, + "cuth": 44328, + "cutie": 10733, + "cuties": 40939, + "cuties": 23420, + "cutiesaturday": 41883, + "cutler": 40428, + "cutlery": 49073, + "cutout": 45016, + "cuts": 7435, + "cutt": 27338, + "cutt": 47647, + "cutter": 19719, + "cutters": 44783, + "cutting": 7266, + "cuz": 9215, + "cv": 13531, + "cv": 13947, + "cvs": 29603, + "cw": 10652, + "cw": 11065, + "cwc": 19179, + "cwgc": 48527, + "cws": 45186, + "cx": 44457, + "cx": 14283, + "cy": 1470, + "cy": 1678, + "cyber": 5830, + "cyber": 10210, + "cybercrime": 41772, + "cybermonday": 36578, + "cyberpunk": 36896, + "cybersecurity": 10581, + "cyborg": 36650, + "cycl": 9791, + "cycle": 19083, + "cycle": 5072, + "cycled": 31055, + "cycles": 14605, + "cycli": 12201, + "cycling": 26353, + "cycling": 6321, + "cyclist": 20686, + "cyclists": 20303, + "cyclo": 18122, + "cyclone": 48094, + "cyclone": 20917, + "cyclones": 34669, + "cylin": 18569, + "cylinder": 22092, + "cylinders": 48888, + "cymb": 36677, + "cymru": 24005, + "cyn": 14324, + "cynthi": 41994, + "cynthia": 23748, + "cyp": 14809, + "cypress": 25347, + "cypri": 36481, + "cyprus": 15263, + "cyril": 36028, + "cyrus": 14204, + "cystic": 46131, + "cyto": 31864, + "cz": 22898, + "cz": 22921, + "cze": 12152, + "czech": 43151, + "czech": 16141, + "cé": 36454, + "cé": 18317, + "d": 67, + "d": 323, + "da": 925, + "da": 1140, + "daa": 32642, + "daan": 44814, + "dab": 10413, + "dab": 22900, + "dac": 16222, + "dac": 27478, + "daca": 28477, + "dach": 34166, + "dachsh": 41641, + "dachshund": 42720, + "dad": 4346, + "dad": 2639, + "dada": 31325, + "daddy": 29466, + "daddy": 6546, + "dade": 23299, + "dades": 28289, + "dads": 12741, + "dae": 23358, + "dae": 15422, + "daener": 46934, + "daes": 47282, + "daesh": 35047, + "daf": 9972, + "daf": 36704, + "daffodils": 44769, + "daft": 36347, + "dag": 11434, + "dag": 25650, + "dagger": 34251, + "dah": 16976, + "dah": 11776, + "dahl": 45816, + "dahl": 22621, + "dahlia": 41768, + "dai": 13559, + "dai": 10632, + "dail": 14676, + "dailies": 21260, + "daily": 6689, + "daily": 2873, + "dailynews": 43466, + "dailys": 43160, + "dailysketch": 46738, + "daim": 40421, + "dain": 32222, + "dain": 28315, + "daipur": 47631, + "dair": 19998, + "dair": 42078, + "dairy": 25243, + "dairy": 10302, + "dairyfree": 49366, + "dais": 10502, + "daisi": 39947, + "daisies": 40654, + "daisy": 39310, + "daisy": 12865, + "dak": 6999, + "dak": 16095, + "dakar": 31137, + "dakota": 38522, + "dakota": 12358, + "dal": 2476, + "dal": 5601, + "dala": 42675, + "dalai": 41222, + "dalail": 35169, + "dalailama": 35849, + "dale": 11533, + "dale": 4677, + "dalejr": 38207, + "dales": 29031, + "daley": 28544, + "dalgo": 43614, + "dali": 36735, + "dali": 25703, + "dalit": 45432, + "dall": 43631, + "dalla": 16772, + "dallas": 27414, + "dallas": 5759, + "dallascowboys": 33016, + "dalmati": 44275, + "dalton": 21488, + "daly": 24873, + "dam": 1880, + "dam": 4926, + "damage": 6822, + "damaged": 13568, + "damages": 28842, + "damaging": 20610, + "damas": 23345, + "damascus": 25396, + "dame": 10069, + "dames": 44548, + "dami": 17783, + "damian": 43307, + "damian": 25375, + "damien": 25090, + "dammit": 31057, + "damn": 37409, + "damn": 4451, + "damned": 28428, + "damon": 48503, + "damon": 18244, + "damp": 26520, + "dams": 37680, + "dan": 2257, + "dan": 2284, + "dana": 44834, + "dana": 13777, + "danao": 38598, + "danc": 3945, + "dance": 
10619, + "dance": 2724, + "danced": 32891, + "dancehall": 33300, + "dancer": 11400, + "dancers": 13153, + "dances": 24083, + "dancing": 33280, + "dancing": 6226, + "dand": 12593, + "dandelion": 38903, + "dandy": 31932, + "dane": 19330, + "danes": 47477, + "dang": 4283, + "dang": 14992, + "danger": 20083, + "danger": 11212, + "dangerous": 7350, + "dangerously": 35012, + "dangers": 23726, + "dangle": 39907, + "dani": 3001, + "dani": 17009, + "daniel": 7859, + "daniel": 4981, + "daniela": 44466, + "danielle": 30396, + "danielle": 15292, + "danielpadilla": 34702, + "daniels": 16146, + "danish": 15467, + "dank": 31849, + "dann": 11951, + "danny": 14950, + "danny": 7621, + "dano": 29703, + "dans": 16241, + "dant": 48097, + "dant": 28237, + "dante": 21911, + "danube": 44594, + "dany": 47816, + "dao": 36099, + "dap": 12149, + "dap": 38034, + "daph": 24591, + "daphne": 31687, + "dapl": 34478, + "dapp": 46857, + "dapper": 26071, + "daq": 25381, + "dar": 1377, + "dar": 6242, + "dara": 17064, + "darby": 34366, + "darcy": 32916, + "dare": 14833, + "dare": 9863, + "daredevil": 28849, + "dares": 42973, + "dareto": 46794, + "dari": 16292, + "dari": 14552, + "daria": 45622, + "daries": 18184, + "daring": 28166, + "dario": 33918, + "darius": 32606, + "darje": 49089, + "dark": 5724, + "dark": 3144, + "darker": 18737, + "darkest": 25898, + "darkness": 10521, + "darling": 13048, + "darlings": 39961, + "darlington": 34565, + "darn": 26059, + "darrell": 33522, + "darren": 20263, + "darren": 12275, + "darry": 29200, + "darryl": 35359, + "darshan": 34564, + "dart": 14001, + "dart": 19841, + "darth": 41304, + "darth": 23164, + "dartmoor": 31477, + "dartmouth": 29667, + "darts": 15246, + "darwin": 43013, + "darwin": 20926, + "daryl": 45607, + "daryl": 24532, + "das": 9940, + "das": 7359, + "dash": 13858, + "dash": 10206, + "dashboard": 27679, + "dashi": 12876, + "dashing": 33825, + "dat": 1717, + "dat": 9445, + "data": 14876, + "data": 2281, + "datab": 11941, + "database": 14678, + "databases": 48384, + "datac": 27329, + "datacenter": 40133, + "datasci": 14496, + "datascience": 15748, + "dataviz": 28138, + "date": 34300, + "date": 1524, + "dated": 13564, + "dates": 7228, + "dating": 8534, + "dation": 15311, + "datlantic": 34270, + "dato": 36075, + "dats": 48674, + "dau": 3162, + "dau": 33828, + "daugh": 42523, + "daughter": 3944, + "daughters": 13585, + "daun": 29470, + "dav": 3700, + "dav": 46488, + "davao": 31502, + "dave": 10089, + "dave": 5077, + "daven": 28350, + "davenport": 34624, + "davey": 33391, + "davi": 1732, + "david": 4640, + "david": 2259, + "davidbowie": 44448, + "davido": 35989, + "davids": 46695, + "davidson": 13166, + "davies": 13120, + "davin": 43187, + "davis": 24426, + "davis": 5536, + "davison": 43725, + "davos": 31887, + "davy": 41565, + "daw": 5971, + "daw": 24404, + "dawg": 18660, + "dawgs": 26431, + "dawn": 30590, + "dawn": 7689, + "dawson": 18611, + "dax": 29458, + "day": 1405, + "day": 575, + "daya": 38165, + "daybreak": 33862, + "daycare": 36363, + "daydream": 41587, + "dayin": 20332, + "daylight": 20809, + "dayo": 29856, + "dayo": 46605, + "dayof": 16272, + "dayofthe": 38043, + "days": 1161, + "daysof": 12379, + "daysofcode": 36537, + "daysto": 29886, + "daystogo": 42198, + "dayswild": 42052, + "daytime": 22830, + "dayton": 35729, + "dayton": 20262, + "daytona": 16335, + "dayweekend": 44526, + "dayz": 35949, + "daz": 15449, + "daz": 43844, + "daze": 33591, + "dazz": 17149, + "dazzle": 41164, + "dazzling": 28821, + "db": 19100, + "db": 8128, + "dbacks": 31175, + "dbs": 40558, + "dbz": 
49226, + "dc": 5074, + "dc": 2743, + "dca": 49107, + "dcc": 33747, + "dccomics": 17610, + "dcfc": 35526, + "dci": 35336, + "dcs": 42878, + "dcu": 42647, + "dd": 1353, + "dd": 3766, + "dda": 35202, + "ddad": 39049, + "dday": 32689, + "dday": 26243, + "ddc": 48513, + "ddd": 24183, + "dddd": 35362, + "dden": 5013, + "dder": 9300, + "dders": 24827, + "ddi": 44450, + "ddin": 17175, + "dding": 48101, + "dding": 8974, + "ddings": 49106, + "ddington": 29238, + "ddle": 17633, + "ddle": 8357, + "ddled": 38392, + "ddles": 33901, + "ddleston": 25647, + "ddling": 30981, + "ddlovato": 28244, + "ddos": 46463, + "ddr": 26027, + "dds": 48334, + "ddu": 43836, + "ddy": 14981, + "ddy": 7876, + "de": 561, + "de": 654, + "dea": 18477, + "deacon": 29155, + "dead": 3906, + "dead": 2747, + "deadliest": 40811, + "deadline": 47209, + "deadline": 8458, + "deadlines": 44959, + "deadly": 10756, + "deadpool": 21471, + "deaf": 28229, + "deaf": 18358, + "deal": 7249, + "deal": 2696, + "dealer": 15218, + "dealers": 21697, + "dealership": 32096, + "dealing": 13138, + "deals": 4469, + "dealt": 30101, + "dean": 13807, + "dean": 5828, + "deandre": 43635, + "deans": 46852, + "dear": 15696, + "dear": 3817, + "dearest": 24880, + "dearly": 31880, + "deas": 34715, + "death": 7163, + "death": 2767, + "deaths": 12253, + "deau": 12399, + "deaux": 19883, + "deb": 2987, + "deb": 25687, + "debat": 32082, + "debate": 5196, + "debates": 19239, + "debating": 23472, + "debbie": 47186, + "debbie": 16735, + "debit": 32410, + "debor": 16738, + "deborah": 40997, + "deborah": 22150, + "debra": 33233, + "debris": 19208, + "debt": 8932, + "debts": 38770, + "debu": 9790, + "debun": 33123, + "debut": 42608, + "debut": 4085, + "debuted": 25215, + "debuting": 34817, + "debuts": 17044, + "dec": 3063, + "dec": 4628, + "deca": 33428, + "decad": 29914, + "decade": 11099, + "decadent": 41716, + "decades": 10488, + "decal": 26678, + "decals": 37606, + "decan": 40677, + "decat": 35334, + "decath": 47455, + "decatur": 38540, + "decay": 22703, + "dece": 3534, + "deceased": 30035, + "december": 3864, + "decent": 10698, + "decentr": 28960, + "decentralized": 38485, + "decep": 33529, + "deception": 33046, + "deci": 2262, + "decide": 8447, + "decided": 4939, + "decides": 17269, + "deciding": 22513, + "decision": 5575, + "decisions": 9903, + "decisive": 28690, + "deck": 24885, + "deck": 6943, + "decked": 39096, + "decker": 21449, + "decks": 23968, + "decl": 7091, + "decla": 10739, + "declan": 42341, + "declar": 18040, + "declaration": 19714, + "declare": 19856, + "declared": 13845, + "declares": 23641, + "declaring": 33273, + "decline": 15084, + "declined": 28911, + "declines": 40478, + "declining": 29221, + "deco": 26412, + "deco": 16422, + "decor": 5148, + "decor": 6928, + "decorate": 23651, + "decorated": 15917, + "decorating": 16968, + "decoration": 16029, + "decorations": 19158, + "decorative": 19289, + "decre": 12284, + "decrease": 24703, + "decreased": 33913, + "decreasing": 43763, + "decree": 43327, + "ded": 16744, + "ded": 1241, + "dedic": 4701, + "dedicate": 27610, + "dedicated": 6770, + "dedication": 10188, + "dedly": 36204, + "deduc": 22799, + "dee": 5268, + "dee": 6705, + "deed": 30260, + "deeds": 24516, + "deejay": 48304, + "deejay": 44511, + "deemed": 28102, + "deen": 26456, + "deen": 12912, + "deep": 5462, + "deep": 3383, + "deepak": 45528, + "deeper": 15224, + "deepest": 22245, + "deephouse": 35684, + "deepi": 19371, + "deepika": 34120, + "deepikap": 29903, + "deepikapadukone": 30646, + "deeplear": 22181, + "deeplearning": 24362, + "deeply": 11449, 
+ "deer": 19454, + "deer": 8700, + "deere": 32901, + "dees": 12547, + "deets": 35537, + "def": 2044, + "def": 11649, + "defam": 35670, + "defamation": 42741, + "default": 21650, + "defe": 4148, + "defeat": 8477, + "defeated": 8927, + "defeating": 22594, + "defeats": 16317, + "defect": 44013, + "defects": 37485, + "defen": 3619, + "defence": 30307, + "defence": 9659, + "defend": 21970, + "defend": 11397, + "defended": 27161, + "defender": 10618, + "defenders": 20063, + "defending": 13098, + "defends": 20134, + "defense": 45875, + "defense": 6021, + "defenseman": 43714, + "defenses": 49198, + "defensive": 10824, + "defi": 17244, + "defiance": 36186, + "defiant": 47597, + "defibrill": 47684, + "defic": 18022, + "defici": 23387, + "deficiency": 30685, + "deficit": 20156, + "defin": 3188, + "define": 14919, + "defined": 15278, + "defines": 28218, + "defining": 20504, + "definite": 40793, + "definitely": 4824, + "definition": 11405, + "definitive": 25298, + "defl": 31467, + "deforestation": 41330, + "defstar": 36427, + "defy": 39148, + "defying": 38496, + "deg": 38498, + "degra": 28939, + "degradation": 44468, + "degre": 4653, + "degree": 7119, + "degrees": 8000, + "deh": 35582, + "dei": 33833, + "dei": 23279, + "deir": 42948, + "deity": 42574, + "deja": 46902, + "dek": 23901, + "dekalb": 37775, + "del": 1233, + "del": 2003, + "dela": 37986, + "delaney": 31528, + "delav": 23706, + "delavin": 40477, + "delavin": 40776, + "delavinkisses": 40631, + "delaware": 17547, + "delay": 12955, + "delay": 10934, + "delayed": 14567, + "delaying": 43781, + "delays": 11232, + "dele": 7922, + "dele": 33431, + "delec": 38615, + "delectable": 45500, + "deleg": 8046, + "delegate": 27259, + "delegates": 14623, + "delegation": 14632, + "delete": 19204, + "deleted": 16588, + "deleting": 41857, + "delft": 42749, + "delgado": 49182, + "delhi": 26723, + "delhi": 5717, + "deli": 1932, + "deli": 18601, + "delia": 33193, + "deliber": 18316, + "deliberate": 38271, + "deliberately": 35163, + "delic": 13366, + "delicacy": 49181, + "delicate": 18768, + "delici": 19993, + "delicious": 3959, + "deliciously": 39589, + "deliciousness": 42819, + "delight": 46165, + "delight": 13073, + "delighted": 5943, + "delightful": 15513, + "delights": 25330, + "deline": 18797, + "delines": 13562, + "delish": 25093, + "deliver": 19561, + "deliver": 7396, + "delivered": 7278, + "deliveries": 29336, + "delivering": 9943, + "delivers": 11753, + "delivery": 5619, + "dell": 24381, + "dell": 10242, + "della": 22986, + "delle": 35963, + "deloit": 29428, + "deloitte": 38667, + "dels": 48636, + "delta": 32250, + "delta": 8768, + "delu": 18779, + "delusional": 48059, + "delux": 13709, + "deluxe": 14056, + "delve": 46008, + "dely": 15040, + "dem": 3251, + "dem": 7825, + "dema": 40268, + "dema": 45046, + "deman": 48366, + "demand": 13072, + "demand": 5650, + "demanded": 33699, + "demanding": 17099, + "demands": 14241, + "demar": 46566, + "demarcus": 47873, + "demb": 35930, + "demdebate": 43973, + "deme": 25143, + "demean": 37376, + "demen": 12604, + "dementi": 46028, + "dementia": 14047, + "demetri": 39553, + "demi": 32879, + "demi": 14480, + "demise": 28756, + "demo": 2930, + "demo": 7380, + "democr": 3573, + "democracy": 7758, + "democrat": 15431, + "democratic": 9149, + "democrats": 8865, + "demographic": 31308, + "demol": 19382, + "demolished": 26537, + "demolition": 22237, + "demon": 5635, + "demon": 12085, + "demonetisation": 41338, + "demonic": 46920, + "demons": 18388, + "demonstr": 8579, + "demonstrate": 22231, + "demonstrated": 29477, + 
"demonstrates": 24806, + "demonstrating": 22107, + "demonstration": 16722, + "demonstrations": 33964, + "demonstrators": 46450, + "demos": 19304, + "demp": 22490, + "dempsey": 30188, + "dems": 10989, + "demsin": 42664, + "demsinphilly": 43091, + "den": 1177, + "den": 1181, + "dena": 32431, + "denali": 48076, + "dence": 3370, + "dency": 11659, + "dend": 37447, + "dends": 43985, + "dene": 45128, + "dened": 19571, + "deng": 43098, + "deng": 41788, + "dengue": 41932, + "denham": 39180, + "deni": 21995, + "denial": 25716, + "denied": 15780, + "denies": 19565, + "denim": 13606, + "denis": 47630, + "denis": 18750, + "denise": 45900, + "denise": 20899, + "denmark": 13268, + "dennis": 32738, + "dennis": 10534, + "denny": 26808, + "denomin": 41016, + "dens": 16533, + "dense": 19353, + "density": 22431, + "dent": 3593, + "dent": 1258, + "dental": 24635, + "dental": 8382, + "dentally": 10346, + "dented": 21923, + "denti": 4418, + "dential": 5459, + "dentist": 17816, + "dentistry": 25754, + "dently": 28817, + "denton": 23567, + "dents": 1517, + "denver": 27847, + "denver": 8569, + "deny": 18679, + "denying": 32771, + "denzel": 42503, + "deo": 26406, + "deo": 12121, + "deodor": 47639, + "deol": 41902, + "deon": 31466, + "deon": 16079, + "dep": 6079, + "dep": 24370, + "depar": 10794, + "depart": 5343, + "depart": 30649, + "departed": 32541, + "departing": 26902, + "department": 5744, + "departments": 29523, + "departs": 38998, + "departure": 17850, + "depe": 36118, + "depend": 13894, + "depend": 27371, + "dependence": 40243, + "dependent": 23280, + "depending": 23673, + "depends": 20497, + "depic": 11307, + "depicted": 34637, + "depicting": 24970, + "depiction": 31071, + "depicts": 29340, + "deple": 38504, + "deplo": 9356, + "deplor": 39232, + "deploy": 26944, + "deployed": 20009, + "deploying": 42212, + "deployment": 20183, + "depo": 14276, + "depor": 36110, + "deport": 23389, + "deportation": 36617, + "deported": 39320, + "deportes": 47878, + "depos": 21266, + "deposit": 16775, + "deposits": 30740, + "depot": 12589, + "depp": 24941, + "depre": 7107, + "depress": 38869, + "depressed": 23269, + "depressing": 29235, + "depression": 10023, + "depri": 28587, + "depriv": 45809, + "deprivation": 47810, + "deprived": 39140, + "dept": 9201, + "depth": 10350, + "depths": 28855, + "depu": 6912, + "deputies": 24914, + "deputy": 7932, + "der": 839, + "der": 801, + "dera": 20696, + "derail": 48502, + "derby": 13904, + "derby": 7177, + "derbyshire": 22147, + "derdale": 21513, + "dere": 5701, + "dere": 44194, + "dered": 3776, + "derek": 22461, + "derek": 11205, + "derel": 46728, + "derer": 11289, + "derers": 20882, + "deri": 34573, + "derick": 33908, + "dering": 6076, + "deriv": 33458, + "derived": 26461, + "derland": 35488, + "derman": 29740, + "dermatology": 48051, + "dern": 30086, + "dero": 37203, + "dero": 34026, + "derrick": 21798, + "derry": 45777, + "derry": 20535, + "ders": 37307, + "ders": 1923, + "derson": 12677, + "dery": 17172, + "des": 6797, + "des": 1437, + "desai": 35316, + "desc": 13866, + "descen": 32318, + "descend": 26004, + "descend": 46241, + "descendants": 36323, + "descending": 36620, + "descent": 19375, + "desch": 49209, + "descri": 4637, + "describe": 10967, + "described": 14671, + "describes": 13678, + "describing": 24239, + "descrip": 41832, + "description": 13951, + "descriptions": 40653, + "desde": 42218, + "dese": 27195, + "deser": 3659, + "desert": 45776, + "desert": 7301, + "deserted": 41560, + "deserve": 7043, + "deserved": 10061, + "deserves": 9079, + "deserving": 26615, + "desh": 
25320, + "desh": 7448, + "deshi": 42769, + "desi": 6772, + "desi": 26635, + "desig": 1250, + "design": 8359, + "design": 1681, + "designated": 24119, + "designation": 41155, + "designed": 4486, + "designer": 35640, + "designer": 5728, + "designers": 12720, + "designing": 13467, + "designs": 6747, + "designthinking": 32450, + "desirable": 32368, + "desire": 11858, + "desired": 28631, + "desires": 27598, + "desk": 11937, + "desk": 6550, + "desks": 41014, + "desktop": 14345, + "desmond": 27821, + "desol": 41258, + "desp": 3642, + "despair": 28097, + "desper": 10144, + "desperate": 15072, + "desperately": 21993, + "despic": 32442, + "despicable": 37158, + "despite": 5325, + "dess": 7096, + "dess": 10001, + "dessert": 9753, + "desserts": 22948, + "desses": 43913, + "dest": 6540, + "dest": 4549, + "destin": 4934, + "destination": 32191, + "destination": 9179, + "destinations": 16981, + "destined": 28525, + "destiny": 39875, + "destiny": 10867, + "destro": 8287, + "destroy": 8308, + "destroy": 11930, + "destroyed": 9965, + "destroyer": 25291, + "destroying": 19613, + "destroys": 27634, + "destruc": 22945, + "destruction": 14281, + "destructive": 29591, + "det": 28966, + "det": 15366, + "deta": 1914, + "detached": 26252, + "detail": 7657, + "detailed": 12609, + "detailing": 23163, + "details": 2353, + "detained": 20260, + "dete": 5606, + "detec": 17991, + "detect": 22744, + "detected": 26988, + "detecting": 41290, + "detection": 16220, + "detective": 13672, + "detectives": 27994, + "detector": 27689, + "detectors": 45063, + "detention": 16908, + "deter": 10742, + "deter": 47458, + "detergent": 46726, + "deterior": 28512, + "determin": 8325, + "determination": 17410, + "determine": 16768, + "determined": 14371, + "determines": 42192, + "determining": 39884, + "deth": 38375, + "deto": 39710, + "deton": 39335, + "detour": 31211, + "detox": 22459, + "detri": 47951, + "detro": 6210, + "detroit": 19404, + "detroit": 7073, + "detta": 45438, + "dette": 35750, + "deu": 21457, + "deuce": 45332, + "deus": 37625, + "deut": 14970, + "deutsch": 30389, + "deutsche": 32760, + "deutschland": 36878, + "deux": 47089, + "dev": 2797, + "dev": 3670, + "deva": 45179, + "devan": 37072, + "devast": 12913, + "devastated": 29865, + "devastating": 19280, + "devastation": 42452, + "devel": 1820, + "develop": 1966, + "develop": 7708, + "developed": 8763, + "developer": 10929, + "developers": 13248, + "developing": 8131, + "development": 2855, + "developmental": 29347, + "developments": 17393, + "develops": 29895, + "deven": 45537, + "devgn": 29871, + "devi": 12926, + "devi": 20717, + "deviant": 25593, + "deviantart": 26046, + "device": 8163, + "devices": 9067, + "devil": 8894, + "devil": 8043, + "deville": 34329, + "devils": 11683, + "devin": 31193, + "devin": 20996, + "devine": 33019, + "devlin": 48040, + "devo": 11861, + "devo": 43444, + "devon": 16205, + "devon": 10046, + "devops": 21504, + "devos": 40646, + "devote": 37777, + "devoted": 24561, + "devotees": 39759, + "devotion": 25821, + "devotional": 35456, + "devs": 27374, + "dew": 31952, + "dew": 16358, + "dewey": 40399, + "dex": 10030, + "dex": 13790, + "dexpo": 42502, + "dexter": 45049, + "dexter": 22781, + "dey": 11829, + "dez": 23190, + "dez": 8122, + "df": 12908, + "df": 10468, + "dfc": 41903, + "dfs": 32880, + "dfw": 20439, + "dg": 2394, + "dg": 9742, + "dgate": 41684, + "dge": 4016, + "dge": 1360, + "dged": 11830, + "dgeon": 45655, + "dgers": 8733, + "dges": 5432, + "dging": 9565, + "dh": 6669, + "dh": 9960, + "dha": 11629, + "dha": 27377, + "dhabi": 22349, + 
"dhaka": 32877, + "dham": 29635, + "dham": 30838, + "dhan": 12542, + "dhan": 28569, + "dhanush": 26162, + "dhanush": 36200, + "dhanushkraja": 29266, + "dhar": 12397, + "dharma": 30536, + "dhary": 28706, + "dhawan": 44699, + "dhe": 29706, + "dheim": 44280, + "dhi": 31553, + "dhi": 26166, + "dho": 37834, + "dhoni": 25698, + "dhru": 40257, + "dhry": 39960, + "dhs": 26849, + "dhu": 32387, + "di": 570, + "di": 1618, + "dia": 7351, + "dia": 3357, + "diab": 15954, + "diabe": 19167, + "diabete": 43826, + "diabetes": 10319, + "diabetic": 30230, + "diablo": 23931, + "diag": 6851, + "diagno": 7736, + "diagnose": 44429, + "diagnosed": 16979, + "diagnosis": 15715, + "diagnostic": 26351, + "diagnostics": 37723, + "diagram": 22697, + "dial": 18416, + "dial": 11381, + "dialo": 30709, + "dialog": 48945, + "dialogue": 11288, + "dialogues": 40330, + "dialysis": 44798, + "diam": 4347, + "diameter": 27189, + "diamon": 8873, + "diamond": 18535, + "diamond": 6235, + "diamonds": 12687, + "dian": 16021, + "dian": 4998, + "diana": 12803, + "diane": 15855, + "dianne": 42299, + "dians": 21041, + "diaper": 34382, + "diapers": 39659, + "diar": 25932, + "diaries": 15541, + "diary": 10380, + "dias": 22137, + "dias": 29354, + "diaspora": 28390, + "diaz": 17688, + "dic": 1404, + "dic": 6717, + "dicap": 30023, + "dicaprio": 30755, + "dice": 14406, + "dick": 14413, + "dick": 9554, + "dickens": 33421, + "dict": 45360, + "dict": 15159, + "dictat": 26156, + "dictator": 27399, + "dictatorship": 37989, + "dictionary": 19699, + "did": 1861, + "did": 1335, + "diddy": 33527, + "didi": 34396, + "didier": 45614, + "didn": 2376, + "didnt": 13057, + "dido": 31725, + "didyou": 12295, + "didyouknow": 12506, + "die": 3150, + "die": 2082, + "diec": 27729, + "diecast": 37936, + "died": 3622, + "diego": 30940, + "diego": 6306, + "diem": 45571, + "dience": 33686, + "dient": 27231, + "dier": 29702, + "dier": 16394, + "dies": 20104, + "dies": 1862, + "diesel": 46312, + "diesel": 10591, + "diest": 45739, + "diet": 21295, + "diet": 6582, + "dietary": 29009, + "dietrich": 47005, + "diets": 35173, + "dif": 18656, + "dif": 48731, + "diff": 44073, + "diff": 20331, + "diffe": 1967, + "differ": 34620, + "differen": 14903, + "difference": 4731, + "differences": 14003, + "different": 2731, + "differenti": 21729, + "differential": 34027, + "differentiate": 49032, + "differently": 18325, + "diffic": 6140, + "difficult": 7405, + "difficulties": 23468, + "difficulty": 25245, + "diffu": 31603, + "diffuser": 49400, + "dig": 1831, + "dig": 9887, + "dige": 17820, + "digest": 20413, + "digestion": 40533, + "digestive": 32304, + "digg": 43240, + "digger": 35919, + "diggin": 48466, + "digging": 14971, + "digi": 15627, + "digi": 39361, + "digimon": 44181, + "digit": 14899, + "digit": 27472, + "digital": 4704, + "digital": 2794, + "digitalart": 16987, + "digitalhealth": 32190, + "digitalindia": 46630, + "digitally": 27543, + "digitalmarketing": 15299, + "digitaltransformation": 20047, + "digiti": 25935, + "digits": 31710, + "digni": 45532, + "dignit": 39497, + "dignity": 17744, + "digo": 35701, + "digs": 26877, + "dih": 43089, + "dii": 32755, + "dijk": 44444, + "dik": 38854, + "dik": 37747, + "dike": 42683, + "dil": 7643, + "dil": 17942, + "dile": 25428, + "dilemma": 29787, + "dilig": 30664, + "dill": 12318, + "dill": 27206, + "dillon": 21056, + "dilu": 45242, + "dim": 19576, + "dim": 17523, + "dime": 24443, + "dimen": 10935, + "dimension": 20479, + "dimensional": 25252, + "dimensions": 25086, + "diment": 43500, + "dimes": 44888, + "dimini": 37459, + "dimit": 22250, + 
"dimitri": 48840, + "dimp": 38853, + "din": 1462, + "din": 5673, + "dina": 36815, + "dinah": 30903, + "dine": 20951, + "dine": 12989, + "diner": 16963, + "dinesh": 48341, + "ding": 7545, + "ding": 796, + "dinger": 45580, + "dingh": 48064, + "dings": 5473, + "dington": 24804, + "dinho": 47370, + "dini": 20196, + "dining": 8658, + "dinner": 27548, + "dinner": 2571, + "dinners": 33570, + "dino": 9692, + "dino": 14077, + "dinosa": 18955, + "dinosaur": 15095, + "dinosaurs": 20387, + "dio": 3779, + "dio": 1521, + "dioce": 20763, + "diocese": 27091, + "dion": 42899, + "dion": 16250, + "dior": 23655, + "dios": 37563, + "dious": 27417, + "dioxide": 38102, + "dip": 19918, + "dip": 11343, + "dipl": 8490, + "diplo": 38115, + "diplom": 11169, + "diploma": 21251, + "diplomacy": 23798, + "diplomat": 32828, + "diplomatic": 23782, + "diplomats": 44126, + "dipped": 30610, + "dipper": 49317, + "dipping": 33544, + "dips": 37522, + "dir": 4251, + "dir": 8478, + "dire": 38355, + "dire": 25664, + "direc": 1534, + "direct": 43224, + "direct": 6016, + "directed": 8392, + "directing": 21817, + "direction": 15923, + "direction": 5407, + "directional": 38687, + "directioner": 48042, + "directioners": 22055, + "directions": 16440, + "directive": 40630, + "directly": 9701, + "director": 20337, + "director": 2681, + "directorial": 45327, + "directors": 11940, + "directory": 25272, + "directs": 34349, + "directv": 48652, + "dirk": 28171, + "dirt": 31415, + "dirt": 11795, + "dirty": 20127, + "dirty": 7615, + "dis": 1518, + "dis": 6112, + "disa": 3882, + "disab": 47380, + "disabilities": 17350, + "disability": 48986, + "disability": 13261, + "disabled": 13613, + "disadvantaged": 40577, + "disagree": 23199, + "disapp": 5384, + "disappear": 21148, + "disappear": 25173, + "disappearance": 35929, + "disappeared": 23139, + "disappearing": 35819, + "disappears": 44406, + "disappo": 7605, + "disappoint": 25446, + "disappointed": 13794, + "disappointing": 21941, + "disappointment": 23884, + "disappoints": 48545, + "disappro": 48276, + "disar": 42971, + "disaster": 9072, + "disasters": 26976, + "disastrous": 35790, + "disc": 1472, + "disc": 10712, + "discar": 40532, + "discarded": 45197, + "discer": 49140, + "dischar": 22671, + "discharge": 32485, + "disci": 9559, + "discip": 38951, + "discipl": 10467, + "disciples": 39366, + "disciplinary": 20232, + "discipline": 18903, + "disciplines": 42032, + "discla": 40248, + "disclaimer": 46465, + "disclo": 17481, + "disclose": 46379, + "disclosed": 30905, + "disclosure": 26502, + "disco": 2475, + "disco": 11964, + "discography": 47545, + "discomfort": 48054, + "discord": 23582, + "discoun": 18515, + "discount": 7638, + "discounted": 20993, + "discounts": 18186, + "discoura": 45850, + "discourse": 29441, + "discover": 10539, + "discover": 4834, + "discovered": 6986, + "discoveries": 29308, + "discovering": 17967, + "discovers": 29719, + "discovery": 40491, + "discovery": 8027, + "discre": 20616, + "discrimin": 11721, + "discrimination": 14775, + "discs": 29270, + "discu": 1984, + "discus": 41828, + "discuss": 4312, + "discussed": 11300, + "discusses": 8116, + "discussing": 5900, + "discussion": 5060, + "discussions": 13806, + "dise": 4262, + "disease": 5336, + "diseases": 12035, + "disen": 46468, + "disgrace": 29877, + "disgraceful": 44146, + "disgu": 9793, + "disguise": 27803, + "disguised": 37149, + "disgusted": 41977, + "disgusting": 16218, + "dish": 11039, + "dish": 4531, + "disha": 42498, + "dishes": 11412, + "dishon": 30777, + "dishu": 44728, + "dishwasher": 40524, + "disin": 19484, + 
"disinfe": 48050, + "disintegr": 49275, + "disk": 17970, + "dislike": 30796, + "dism": 30836, + "dism": 38821, + "dismant": 36557, + "dismiss": 43287, + "dismissal": 42068, + "dismissed": 30087, + "dismisses": 45238, + "disney": 6729, + "disney": 4696, + "disneyland": 39481, + "disneyland": 13661, + "disneyworld": 28469, + "diso": 26305, + "disobe": 42841, + "dison": 19310, + "disorder": 12635, + "disorders": 17114, + "disp": 11073, + "dispar": 24633, + "disparities": 45122, + "dispat": 28652, + "dispatch": 26306, + "dispen": 19077, + "dispenser": 40116, + "disper": 34499, + "displa": 9326, + "displac": 17718, + "displaced": 22817, + "displacement": 37931, + "display": 4456, + "displayed": 18967, + "displaying": 26468, + "displays": 15648, + "dispo": 13651, + "dispon": 38872, + "disponible": 46130, + "dispos": 45177, + "disposable": 37275, + "disposal": 28231, + "dispro": 32927, + "dispropor": 40354, + "disproportion": 45492, + "disregard": 43869, + "disrespect": 34055, + "disrespectful": 41723, + "disru": 13763, + "disrup": 14641, + "disrupt": 25214, + "disrupted": 46674, + "disrupting": 42419, + "disruption": 19635, + "disruptive": 31554, + "diss": 10766, + "diss": 35688, + "dissec": 43879, + "dissemin": 40463, + "dissent": 45154, + "disser": 25560, + "dissertation": 29448, + "dissi": 25088, + "dissol": 27398, + "dissuper": 33461, + "dist": 5479, + "dist": 12116, + "distance": 7964, + "distances": 37078, + "distant": 18949, + "distill": 41586, + "distilled": 49179, + "distillery": 22200, + "distin": 11892, + "distinct": 25056, + "distinction": 28183, + "distinctive": 25486, + "distingui": 15053, + "distinguish": 45418, + "distinguished": 16513, + "distor": 23781, + "distortion": 43690, + "distr": 11885, + "distract": 39309, + "distracted": 24049, + "distraction": 32039, + "distress": 26866, + "distressed": 37515, + "distri": 5987, + "distribu": 6138, + "distribute": 32313, + "distributed": 16419, + "distributing": 35216, + "distribution": 10484, + "distributor": 28354, + "distributors": 44240, + "distric": 3208, + "district": 46683, + "district": 3506, + "districts": 17565, + "distur": 11732, + "disturb": 33018, + "disturb": 39449, + "disturbance": 42416, + "disturbed": 29967, + "disturbing": 21476, + "disupdates": 45667, + "dit": 5752, + "dit": 2524, + "dita": 47965, + "ditch": 43715, + "ditch": 19291, + "dited": 40392, + "diti": 2363, + "dition": 16452, + "dition": 3015, + "ditional": 4322, + "ditions": 4503, + "dito": 43705, + "dits": 49374, + "dity": 16436, + "dium": 2903, + "div": 5293, + "div": 14869, + "diva": 13605, + "divas": 23534, + "dive": 26042, + "dive": 9058, + "diver": 13119, + "diver": 22094, + "divergence": 48735, + "divergent": 36132, + "divers": 30241, + "divers": 27038, + "diverse": 11464, + "diversi": 24475, + "diversion": 38457, + "diversity": 35634, + "diversity": 6257, + "diverted": 41049, + "dives": 13893, + "divi": 8375, + "divid": 31337, + "divide": 18842, + "divided": 18689, + "dividend": 32067, + "dividends": 45146, + "dividing": 45605, + "divin": 21838, + "divine": 46919, + "divine": 10976, + "diving": 9886, + "divinity": 39754, + "divisi": 39196, + "division": 5378, + "divisional": 40912, + "divisions": 33715, + "divor": 13543, + "divorce": 17060, + "divorced": 39437, + "divya": 47767, + "diwali": 18218, + "dix": 45838, + "dix": 27620, + "dixie": 24484, + "dixit": 28279, + "dixon": 16086, + "diy": 28472, + "diy": 7845, + "diya": 36459, + "diz": 32740, + "dized": 36232, + "dizz": 40239, + "dizzy": 35464, + "dj": 3761, + "dj": 3723, + "djan": 35338, + 
"django": 46498, + "dji": 35284, + "dji": 28379, + "djing": 36113, + "djo": 19432, + "djoker": 42721, + "djokernole": 42830, + "djokovic": 27944, + "djs": 18117, + "dk": 20702, + "dk": 16196, + "dl": 12558, + "dl": 9373, + "dlc": 19079, + "dle": 11057, + "dle": 3287, + "dled": 23494, + "dler": 40279, + "dles": 7890, + "dless": 14997, + "dley": 12808, + "dling": 18221, + "dly": 3069, + "dm": 19070, + "dm": 4667, + "dma": 42903, + "dman": 18826, + "dmc": 28991, + "dmit": 31607, + "dmitry": 48326, + "dms": 19955, + "dmv": 27508, + "dmx": 45255, + "dn": 11552, + "dn": 7459, + "dna": 8790, + "dnb": 35422, + "dnc": 20237, + "dnd": 11678, + "dnr": 37051, + "dns": 39245, + "dnt": 26795, + "do": 639, + "do": 818, + "doa": 48332, + "dob": 29640, + "doba": 35605, + "dobbs": 43006, + "dobson": 46888, + "doc": 3009, + "doc": 7251, + "doch": 25101, + "dock": 17311, + "dock": 8997, + "docked": 46784, + "docker": 31152, + "docking": 40845, + "docks": 24091, + "docs": 15157, + "doctor": 7872, + "doctor": 5547, + "doctoral": 23649, + "doctorate": 39134, + "doctors": 9705, + "doctorwho": 12996, + "doctr": 28497, + "doctrine": 35612, + "docu": 4433, + "document": 29293, + "document": 15121, + "documentaries": 44209, + "documentary": 7881, + "documentation": 31560, + "documented": 22310, + "documenting": 37876, + "documents": 14105, + "dod": 13847, + "dod": 30187, + "dodd": 36748, + "dodge": 31263, + "dodge": 12093, + "dodgeball": 43244, + "dodger": 31641, + "dodgers": 12422, + "dodgy": 37727, + "doe": 13296, + "does": 2397, + "does": 1897, + "doesn": 2503, + "doesnt": 17937, + "dof": 8277, + "doff": 20193, + "dofficial": 42516, + "dog": 4326, + "dog": 1929, + "dogcelebration": 41819, + "dogday": 27475, + "doge": 42187, + "dogg": 20749, + "doggie": 32237, + "doggo": 42155, + "doggy": 26359, + "doglo": 40733, + "dogre": 40030, + "dogrescue": 44158, + "dogs": 42182, + "dogs": 3255, + "dogsoftwitter": 19415, + "doh": 23581, + "doha": 20908, + "doherty": 31774, + "doi": 36361, + "doin": 15412, + "doing": 37408, + "doing": 1960, + "doit": 32272, + "doit": 28109, + "doj": 25700, + "dojo": 35901, + "dok": 40547, + "dok": 41034, + "doka": 46528, + "dol": 2287, + "dol": 19170, + "dola": 38005, + "dolan": 27200, + "dolby": 42414, + "dolce": 30033, + "dolce": 30661, + "dole": 41040, + "doll": 27031, + "doll": 9286, + "dollar": 35092, + "dollar": 7474, + "dollars": 10669, + "dolls": 15090, + "dolly": 43281, + "dolly": 23821, + "dolom": 37137, + "dolores": 40741, + "dolph": 8900, + "dolph": 22257, + "dolphin": 42963, + "dolphin": 16464, + "dolphins": 14002, + "dom": 2164, + "dom": 1919, + "domain": 15492, + "domaine": 48744, + "domains": 36358, + "dome": 8515, + "dome": 9827, + "domen": 37584, + "domest": 21936, + "domestic": 28189, + "domestic": 9043, + "domin": 4361, + "dominance": 30546, + "dominant": 20565, + "dominate": 21431, + "dominated": 23048, + "dominates": 34043, + "dominating": 29303, + "domination": 30919, + "domingo": 24882, + "dominic": 39007, + "dominic": 19095, + "dominican": 22934, + "dominion": 27155, + "domino": 30752, + "dominos": 39770, + "domo": 44293, + "doms": 30126, + "don": 1067, + "don": 847, + "dona": 26789, + "donal": 42375, + "donald": 5990, + "donald": 4335, + "donaldson": 37783, + "donaldtrump": 6652, + "donat": 36384, + "donate": 6429, + "donated": 8705, + "donates": 26960, + "donating": 12621, + "donation": 7924, + "donations": 9928, + "doncaster": 38008, + "doncaster": 25352, + "doncasterisgreat": 47333, + "done": 5136, + "done": 1700, + "donegal": 24172, + "donesia": 41281, + "donet": 
33724, + "donetsk": 33999, + "dong": 26242, + "dong": 31478, + "dongha": 28365, + "donghae": 28945, + "donia": 24014, + "donkey": 21415, + "donkeys": 44644, + "donna": 9158, + "donne": 30897, + "donnein": 38308, + "donneinarte": 40193, + "donnell": 35118, + "donnelly": 39070, + "donnie": 47058, + "donnie": 30609, + "donny": 37291, + "donny": 32887, + "dono": 14840, + "donor": 18013, + "donors": 17887, + "donovan": 21499, + "dons": 22127, + "dont": 8094, + "dont": 4632, + "donut": 18471, + "donuts": 13970, + "doo": 4543, + "doo": 11643, + "doodle": 9388, + "doodled": 41030, + "doodles": 22156, + "doodling": 37548, + "dooley": 47609, + "doom": 23263, + "doom": 14344, + "doomed": 33251, + "doomsday": 41791, + "doon": 36612, + "doop": 33886, + "door": 7188, + "door": 2489, + "doors": 4228, + "doorstep": 19533, + "doorway": 46575, + "dop": 42381, + "dop": 31722, + "dope": 42587, + "dope": 10094, + "doping": 30285, + "dopp": 21774, + "doppelg": 45216, + "doppler": 42540, + "dor": 2766, + "dor": 8695, + "dora": 18104, + "dorado": 32350, + "dorchester": 32656, + "dore": 39423, + "dores": 34323, + "dorf": 17296, + "dori": 49270, + "doria": 43186, + "dorian": 44016, + "doris": 24285, + "dork": 36206, + "dorm": 24263, + "doro": 15498, + "doro": 37389, + "dorothy": 20805, + "dors": 31240, + "dorset": 42109, + "dorset": 16047, + "dorsey": 41607, + "dortmund": 24290, + "dory": 36135, + "dos": 44258, + "dos": 5474, + "dose": 11497, + "doses": 37873, + "dossier": 46042, + "dost": 44222, + "dot": 7473, + "dot": 7004, + "dota": 23085, + "dotcom": 12443, + "dote": 31202, + "dothis": 47864, + "dotnet": 43124, + "dotorg": 46587, + "dots": 19019, + "dotted": 47950, + "dou": 1756, + "dou": 23608, + "doub": 19631, + "double": 13013, + "double": 3200, + "doubled": 24948, + "doubleheader": 34668, + "doubles": 12539, + "doubling": 36850, + "doubt": 37071, + "doubt": 8671, + "doubts": 30894, + "douche": 44292, + "doug": 20271, + "doug": 10758, + "dough": 15785, + "dough": 14983, + "doughnut": 32555, + "doughnuts": 31124, + "dougie": 46317, + "dougla": 9140, + "douglas": 10065, + "douglass": 45692, + "doun": 44785, + "dov": 38856, + "dova": 26551, + "dove": 27511, + "dove": 18281, + "dover": 43019, + "dover": 14683, + "doves": 47067, + "dow": 8022, + "dow": 10688, + "dowell": 27344, + "down": 1833, + "down": 1136, + "downe": 46501, + "downed": 35814, + "downer": 42522, + "downers": 43739, + "downey": 29429, + "downfall": 48702, + "downhill": 27387, + "downing": 28140, + "download": 35076, + "download": 3794, + "downloadable": 49105, + "downloaded": 22961, + "downloading": 30519, + "downloads": 26481, + "downpour": 39034, + "downpours": 40160, + "downs": 10706, + "downside": 41937, + "downstairs": 28174, + "downstream": 43822, + "downtime": 41964, + "downton": 45023, + "downton": 42668, + "downtown": 18230, + "downtown": 5061, + "downward": 37430, + "dowski": 43556, + "dox": 44786, + "dox": 14510, + "doyle": 17728, + "doyou": 27256, + "doz": 31106, + "dozen": 16401, + "dozens": 17883, + "dp": 23820, + "dp": 6465, + "dprint": 46644, + "dprinting": 16194, + "dprk": 47920, + "dps": 34288, + "dq": 28741, + "dr": 1084, + "dr": 1701, + "dra": 1114, + "dra": 7402, + "drac": 20168, + "dracing": 41253, + "dracula": 25405, + "draf": 37426, + "draft": 30624, + "draft": 5198, + "drafted": 19129, + "drafting": 33528, + "drafts": 29194, + "drag": 8452, + "drag": 12463, + "dragged": 27884, + "dragging": 37069, + "dragon": 9187, + "dragon": 5471, + "dragonball": 40959, + "dragoncon": 47802, + "dragonfly": 32824, + "dragons": 10203, + 
"dragrace": 40762, + "drags": 45368, + "drain": 23347, + "drain": 19467, + "drainage": 25953, + "drained": 44630, + "drains": 43638, + "drainthe": 47337, + "drake": 32504, + "drake": 8958, + "dral": 7503, + "dram": 6937, + "dram": 32170, + "drama": 5055, + "dramas": 33467, + "dramati": 43512, + "dramatic": 11240, + "dramatically": 24495, + "drank": 21712, + "draped": 49113, + "drastic": 43159, + "drastically": 35478, + "drau": 18621, + "draw": 17675, + "draw": 4001, + "drawer": 23219, + "drawers": 38975, + "drawing": 36996, + "drawing": 3610, + "drawings": 13397, + "drawn": 8893, + "draws": 12043, + "dray": 25562, + "drayton": 49044, + "drc": 21434, + "dre": 960, + "dre": 14584, + "dread": 17412, + "dread": 31403, + "dreaded": 47227, + "dreadful": 35846, + "dreality": 48367, + "dream": 4595, + "dream": 2984, + "dreambig": 46495, + "dreamcast": 47226, + "dreamed": 27984, + "dreamer": 25692, + "dreamers": 27194, + "dreaming": 11662, + "dreamliner": 49143, + "dreams": 4405, + "dreamt": 43743, + "dreamteam": 40090, + "dreamy": 23517, + "dred": 10903, + "dredge": 48783, + "dren": 29068, + "dren": 47309, + "drenched": 46378, + "dres": 48852, + "dres": 44697, + "dresden": 34836, + "dress": 12622, + "dress": 2595, + "dressage": 36144, + "dressed": 6559, + "dresser": 26346, + "dresses": 8184, + "dressing": 6348, + "drew": 18792, + "drew": 5281, + "drex": 33985, + "drey": 48271, + "dri": 1203, + "dri": 28833, + "drian": 36870, + "dribb": 42153, + "dric": 23448, + "dridge": 22956, + "drie": 40170, + "dried": 16037, + "drier": 39877, + "dries": 33857, + "drif": 33585, + "drift": 18194, + "drifting": 30276, + "drill": 11626, + "drilled": 46338, + "drilling": 18634, + "drills": 24378, + "drin": 3375, + "drin": 47133, + "drink": 14131, + "drink": 3979, + "drinking": 5778, + "drinklocal": 45998, + "drinks": 6732, + "drip": 24050, + "dripping": 38787, + "dris": 35804, + "drive": 11402, + "drive": 2620, + "driven": 9314, + "driver": 27563, + "driver": 4383, + "driverless": 46769, + "drivers": 7384, + "drives": 11441, + "driveway": 26273, + "driving": 37800, + "driving": 4161, + "drizzle": 28240, + "drm": 39674, + "dro": 1494, + "dro": 12442, + "drogba": 49199, + "droid": 38016, + "drome": 9157, + "dron": 43898, + "dron": 23360, + "drone": 33557, + "drone": 9397, + "drones": 14006, + "droo": 30715, + "drool": 41554, + "drooling": 44360, + "drop": 16407, + "drop": 3387, + "dropbox": 47216, + "dropped": 6792, + "dropping": 8339, + "drops": 6437, + "dros": 47033, + "drou": 38558, + "drought": 13935, + "drove": 13753, + "drow": 21159, + "drown": 28571, + "drowned": 34005, + "drowning": 24618, + "drs": 21257, + "dru": 2275, + "dru": 49048, + "drug": 20601, + "drug": 5600, + "drugs": 8021, + "druid": 40297, + "drum": 13353, + "drum": 8698, + "drummer": 13618, + "drummers": 46191, + "drumming": 35480, + "drummond": 42213, + "drums": 11690, + "drun": 15488, + "drunk": 37398, + "drunk": 8232, + "drunken": 28196, + "drupal": 46481, + "drush": 43009, + "drwho": 48342, + "dry": 13544, + "dry": 4501, + "dryer": 24425, + "drying": 23203, + "ds": 3361, + "ds": 646, + "dsa": 47607, + "dsb": 47168, + "dsb": 14257, + "dsburg": 47237, + "dsc": 37240, + "dsd": 45383, + "dsley": 40740, + "dslr": 33740, + "dsm": 39502, + "dson": 40310, + "dsp": 45291, + "dss": 41580, + "dstv": 35027, + "dt": 13104, + "dt": 7427, + "dthe": 13863, + "dtla": 31885, + "dtm": 42407, + "dts": 46233, + "du": 691, + "du": 3686, + "dua": 25244, + "dual": 39739, + "dual": 5347, + "duane": 38946, + "dub": 14526, + "dub": 13144, + "duba": 5485, + "dubai": 
32599, + "dubai": 5985, + "dubbed": 27740, + "dublin": 20707, + "dublin": 6145, + "dubnation": 47329, + "dubois": 48046, + "dubrov": 46709, + "dubrovnik": 48724, + "dubs": 27013, + "dubstep": 38303, + "dubu": 43257, + "duc": 979, + "duc": 36446, + "ducati": 28570, + "ducation": 17197, + "duce": 3660, + "duchess": 21713, + "duck": 12708, + "duck": 6910, + "ducks": 11202, + "duct": 26829, + "dude": 48087, + "dude": 5710, + "dudes": 14449, + "dudley": 27324, + "due": 2887, + "duel": 27143, + "dues": 37646, + "duet": 25457, + "duf": 38713, + "duff": 38071, + "duff": 21934, + "duffy": 23599, + "dug": 22743, + "dug": 21000, + "dugg": 40523, + "duggan": 46169, + "dugout": 36831, + "duh": 26716, + "dui": 29693, + "duk": 14160, + "duke": 18402, + "duke": 7732, + "dukes": 27914, + "dul": 6738, + "dulce": 44872, + "dulil": 32565, + "dulkar": 47980, + "dull": 19433, + "dulu": 28865, + "duluth": 32109, + "dulwich": 47343, + "dum": 13400, + "dum": 11564, + "dumb": 15901, + "dumb": 12464, + "dumbass": 38980, + "dummies": 40899, + "dummy": 34246, + "dump": 12655, + "dump": 17146, + "dumped": 23768, + "dumping": 31707, + "dumplings": 35495, + "dumps": 45804, + "dumpster": 45467, + "dun": 2616, + "dun": 18284, + "dunbar": 41453, + "duncan": 31084, + "duncan": 13502, + "dundal": 38185, + "dundas": 39300, + "dundee": 18619, + "dune": 32833, + "dune": 28208, + "dunedin": 40121, + "dunes": 23526, + "dung": 33712, + "dungeon": 28812, + "dungeon": 22931, + "dungeons": 42572, + "dungeonsand": 34970, + "dungeonsanddragons": 35497, + "dunham": 42501, + "duni": 43454, + "dunk": 17222, + "dunkin": 48022, + "dunkin": 36415, + "dunkirk": 46928, + "dunks": 48977, + "dunlop": 34753, + "dunn": 19185, + "dunne": 38538, + "dunno": 24502, + "duo": 8696, + "dup": 36805, + "dup": 10445, + "duper": 44850, + "duplex": 41186, + "duplic": 28992, + "dupont": 35994, + "dur": 4355, + "dur": 23230, + "dura": 28173, + "dura": 47382, + "durability": 43671, + "durable": 22285, + "duran": 28185, + "durango": 44443, + "durant": 24861, + "duras": 27518, + "duration": 31663, + "durban": 24474, + "dure": 19108, + "durga": 38456, + "durham": 26765, + "durham": 14335, + "during": 1590, + "dus": 9931, + "dusa": 28546, + "dusk": 19708, + "dust": 29723, + "dust": 8349, + "dusted": 38274, + "duster": 46280, + "dustin": 42423, + "dustin": 21235, + "dusting": 41756, + "dusty": 22029, + "dut": 32625, + "dutch": 22277, + "dutch": 7991, + "duter": 21624, + "duterte": 22371, + "duties": 19603, + "dutt": 30081, + "dutton": 42771, + "duty": 6458, + "duval": 42459, + "duvet": 48006, + "dux": 28562, + "dv": 4288, + "dv": 26265, + "dvd": 7170, + "dvds": 36655, + "dvn": 29811, + "dvr": 29210, + "dw": 8455, + "dw": 19997, + "dwar": 13487, + "dwarf": 22643, + "dwayne": 31395, + "dwell": 27549, + "dwell": 18755, + "dwelling": 37098, + "dwight": 22473, + "dwp": 46976, + "dwts": 30220, + "dwyer": 43878, + "dx": 22717, + "dx": 15679, + "dy": 1444, + "dy": 907, + "dyce": 48325, + "dye": 37159, + "dye": 15997, + "dyed": 24906, + "dyer": 29495, + "dyes": 39874, + "dying": 5115, + "dyk": 12142, + "dyke": 32632, + "dylan": 21004, + "dylan": 9900, + "dyn": 44289, + "dyn": 30669, + "dynam": 5735, + "dynamic": 10057, + "dynamics": 14329, + "dynamite": 29003, + "dynamo": 28281, + "dynasty": 14593, + "dyne": 42756, + "dyou": 11484, + "dyour": 22525, + "dys": 11022, + "dys": 38384, + "dysfunction": 36865, + "dysfunctional": 40757, + "dysle": 33681, + "dyslexia": 43199, + "dyson": 34475, + "dyssey": 17435, + "dystop": 28276, + "dystopian": 38915, + "dz": 24421, + "dz": 22913, + 
"dé": 25466, + "dü": 46948, + "dÃŃ": 46988, + "e": 68, + "e": 324, + "ea": 2150, + "ea": 8100, + "eable": 20693, + "each": 31442, + "each": 2416, + "eachother": 40792, + "ead": 42556, + "ead": 45523, + "eae": 27446, + "eag": 3743, + "eager": 21551, + "eagerly": 30094, + "eagle": 20207, + "eagle": 7517, + "eagles": 6920, + "eal": 48872, + "ealing": 40484, + "eames": 49072, + "eamon": 45954, + "ean": 13327, + "ear": 1055, + "ear": 8373, + "earbuds": 47807, + "eared": 9127, + "earl": 30573, + "earl": 14235, + "earle": 40292, + "earlier": 4297, + "earliest": 22097, + "early": 15840, + "early": 2090, + "earn": 33977, + "earn": 8465, + "earned": 8898, + "earnest": 45422, + "earning": 14550, + "earnings": 15912, + "earns": 16760, + "earp": 35296, + "earphones": 44905, + "earring": 28664, + "earrings": 9136, + "ears": 9861, + "eart": 7086, + "earth": 5184, + "earth": 3475, + "earthand": 34229, + "earthandclouds": 34480, + "earthday": 19481, + "earthquake": 10060, + "earthquakes": 32895, + "earthy": 47139, + "earts": 38824, + "eas": 5740, + "ease": 13574, + "easier": 8817, + "easiest": 26314, + "easily": 8197, + "easing": 44825, + "easport": 42251, + "east": 5022, + "east": 2602, + "eastbound": 28827, + "eastbourne": 38455, + "eastenders": 23545, + "easter": 14783, + "easter": 4811, + "eastern": 34522, + "eastern": 6311, + "eastman": 48280, + "easton": 29619, + "eastside": 42650, + "eastwood": 28270, + "easy": 18308, + "easy": 3176, + "eat": 5418, + "eat": 3384, + "eaten": 16750, + "eater": 24060, + "eaters": 37645, + "eatery": 46559, + "eating": 4371, + "eatlocal": 42868, + "eaton": 28462, + "eats": 13188, + "eau": 17608, + "eazy": 36536, + "eb": 12283, + "eb": 8677, + "eba": 40889, + "ebay": 34412, + "ebay": 4099, + "eber": 34020, + "ebo": 46635, + "ebola": 15864, + "ebon": 22013, + "ebony": 30651, + "ebook": 13122, + "ebooks": 25774, + "ec": 747, + "ec": 10879, + "eca": 18465, + "ecar": 34500, + "ecb": 26205, + "ecc": 33128, + "eccc": 47401, + "eccentric": 43228, + "eccle": 27494, + "ece": 2163, + "eces": 5905, + "ecg": 45983, + "ech": 15797, + "ech": 31147, + "echel": 41233, + "echo": 17366, + "echo": 13989, + "echoes": 32564, + "eci": 31936, + "eck": 25866, + "eck": 15969, + "ecker": 39661, + "ecker": 40890, + "ecla": 47806, + "eclec": 25114, + "eclectic": 28382, + "eclip": 30841, + "eclipse": 11505, + "eclub": 38983, + "eco": 5106, + "eco": 10077, + "ecofriendly": 43412, + "ecol": 22706, + "ecological": 25127, + "ecology": 18578, + "ecommerce": 15529, + "econ": 26755, + "econ": 21158, + "econom": 2768, + "economic": 36649, + "economic": 5259, + "economical": 48782, + "economically": 39406, + "economics": 12625, + "economies": 27136, + "economist": 18836, + "economists": 43701, + "economy": 5644, + "ecor": 28962, + "ecosystem": 15788, + "ecosystems": 28725, + "ecoun": 27924, + "ecr": 48572, + "ecraft": 11439, + "ecs": 23485, + "ecstasy": 47286, + "ecstatic": 36244, + "ect": 25168, + "ecu": 13087, + "ecu": 32919, + "ecuador": 19813, + "ecz": 43530, + "ed": 843, + "ed": 538, + "eda": 10804, + "edad": 44724, + "eday": 39258, + "edc": 21245, + "edchat": 14702, + "edd": 35431, + "eddi": 42930, + "eddie": 22748, + "eddie": 9517, + "eddy": 25959, + "ede": 29632, + "eded": 19555, + "edel": 20460, + "edelman": 48139, + "eden": 23621, + "eden": 13741, + "eder": 16249, + "edes": 36247, + "edfringe": 27402, + "edg": 35955, + "edgar": 33543, + "edgar": 17914, + "edge": 16914, + "edge": 5461, + "edged": 39188, + "edges": 20938, + "edgy": 35393, + "edi": 8750, + "edi": 27148, + "edible": 19795, + "edic": 
25184, + "edics": 30641, + "edin": 6524, + "edinburgh": 27574, + "edinburgh": 8068, + "eding": 5742, + "edison": 25846, + "edit": 8239, + "edit": 8013, + "edited": 13945, + "edith": 28597, + "editing": 10178, + "edition": 3062, + "editions": 21664, + "editor": 7661, + "editorial": 12325, + "editors": 19486, + "edits": 24945, + "edm": 37843, + "edm": 13539, + "edmon": 11275, + "edmond": 41581, + "edmonds": 46520, + "edmonton": 37311, + "edmonton": 15058, + "edmun": 36561, + "edmund": 27567, + "edna": 39002, + "edo": 29145, + "edo": 18096, + "edon": 41467, + "edor": 30184, + "edou": 47678, + "edp": 46066, + "eds": 1941, + "edsheeran": 30386, + "edt": 15071, + "edtech": 41825, + "edtech": 15262, + "edu": 11757, + "edu": 11799, + "eduardo": 30604, + "educ": 2200, + "educate": 17563, + "educated": 21447, + "education": 22358, + "education": 2806, + "educational": 10400, + "educator": 19875, + "educators": 15420, + "edwar": 27586, + "edward": 26184, + "edward": 7450, + "edwards": 12627, + "edwin": 48718, + "edwin": 22471, + "edy": 17072, + "edy": 4144, + "ee": 2644, + "ee": 4708, + "eed": 17513, + "eee": 24632, + "eee": 9361, + "eeee": 11696, + "eeee": 17570, + "eeeee": 26938, + "eeeeee": 41407, + "eek": 46591, + "eel": 27462, + "eels": 44416, + "eem": 27236, + "een": 47490, + "een": 21230, + "eer": 35409, + "eer": 31846, + "eera": 36664, + "eerie": 33846, + "ees": 40308, + "eet": 48935, + "eez": 39033, + "ef": 1490, + "ef": 1829, + "efa": 16999, + "eface": 48804, + "efan": 33556, + "efc": 22065, + "efcc": 46087, + "efer": 26199, + "eff": 20548, + "eff": 21715, + "effe": 2808, + "effec": 3943, + "effect": 5436, + "effective": 6837, + "effectively": 17516, + "effectiveness": 26847, + "effects": 7331, + "effic": 36004, + "efficacy": 39937, + "effici": 6670, + "efficiency": 11823, + "efficient": 11334, + "efficiently": 32915, + "effor": 6356, + "effort": 40078, + "effort": 6255, + "effortless": 41639, + "effortlessly": 42320, + "efforts": 6847, + "efish": 35813, + "efl": 27172, + "efron": 48111, + "efs": 7389, + "eg": 8053, + "eg": 14599, + "ega": 41193, + "egan": 42943, + "eger": 46704, + "eger": 22767, + "egg": 13778, + "egg": 5911, + "eggplant": 34906, + "eggs": 7099, + "ego": 34712, + "ego": 14250, + "egos": 43992, + "egre": 27044, + "egret": 42002, + "egy": 5224, + "egyp": 10250, + "egypt": 7267, + "egyptian": 12428, + "eh": 9277, + "eh": 9135, + "eha": 48563, + "ehealth": 48617, + "ehr": 45271, + "ehs": 44648, + "ei": 4006, + "ei": 18264, + "eic": 40251, + "eid": 28038, + "eid": 13979, + "eidmubarak": 46275, + "eiffel": 29720, + "eigh": 13468, + "eight": 7910, + "eighteen": 49316, + "eighth": 21237, + "eighty": 47449, + "eil": 29457, + "eileen": 31468, + "ein": 29944, + "ein": 24524, + "eindhoven": 47172, + "eing": 7702, + "einstein": 20587, + "eira": 47708, + "eis": 13802, + "eisen": 25273, + "eisenhower": 35562, + "either": 6036, + "ej": 19887, + "ej": 25009, + "ejec": 29771, + "ek": 4212, + "ek": 2092, + "el": 544, + "el": 832, + "ela": 11284, + "ela": 3787, + "elab": 38866, + "elabor": 26034, + "elaborate": 33855, + "elaine": 22523, + "elan": 17763, + "elan": 18399, + "eland": 24930, + "eland": 6275, + "elas": 41078, + "elast": 27479, + "elastic": 30282, + "elba": 48598, + "elbow": 21965, + "eld": 5684, + "elder": 11791, + "elder": 14416, + "elderly": 15455, + "elders": 28617, + "eldest": 33503, + "elding": 28223, + "elds": 13466, + "ele": 2084, + "ele": 9766, + "eleague": 36577, + "eleanor": 18604, + "elearning": 29969, + "elec": 1564, + "elec": 38768, + "elect": 15336, + "elected": 
8828, + "election": 19312, + "election": 4247, + "electionday": 40540, + "elections": 6949, + "elector": 16465, + "electoral": 19544, + "electr": 3654, + "electra": 48959, + "electri": 23927, + "electric": 19547, + "electric": 5031, + "electrical": 12176, + "electrician": 46422, + "electricity": 10950, + "electrifying": 48843, + "electro": 11648, + "electro": 23244, + "electromagnetic": 46530, + "electron": 33396, + "electronic": 33865, + "electronic": 9273, + "electronica": 43119, + "electronics": 13081, + "eled": 20357, + "elee": 44112, + "eleg": 8075, + "elegance": 19146, + "elegant": 11124, + "elek": 34559, + "elem": 25406, + "element": 14909, + "elementary": 8143, + "elements": 10925, + "elen": 30654, + "elen": 39164, + "elena": 19421, + "eleng": 48180, + "eleph": 7554, + "elephant": 10299, + "elephants": 16871, + "eler": 24646, + "eless": 15244, + "eless": 30837, + "elets": 19400, + "elev": 7921, + "elevate": 26736, + "elevated": 23967, + "elevation": 23826, + "elevator": 19021, + "eleven": 31617, + "eleven": 17795, + "elf": 45961, + "elf": 11924, + "elfie": 39955, + "elg": 28790, + "elgin": 31868, + "eli": 1018, + "eli": 6292, + "elia": 10956, + "elian": 42508, + "elias": 47274, + "elias": 29902, + "elic": 34743, + "elic": 13492, + "elie": 38677, + "elie": 26501, + "elier": 14634, + "elife": 37429, + "elife": 12719, + "eligibility": 34937, + "eligible": 16978, + "elijah": 26065, + "elike": 48913, + "elim": 9296, + "elimin": 11386, + "eliminate": 19655, + "eliminated": 29075, + "eliminating": 36619, + "elimination": 24176, + "elin": 25353, + "elin": 13458, + "eline": 46199, + "eline": 7153, + "eling": 9990, + "elio": 47943, + "elion": 30682, + "elions": 44159, + "eliot": 33326, + "elis": 23411, + "elis": 48021, + "elisa": 25610, + "elisa": 44051, + "elisabeth": 33127, + "elise": 27124, + "elit": 40882, + "elite": 32277, + "elite": 6553, + "elited": 43943, + "elitedangerous": 47138, + "elites": 35975, + "elius": 35623, + "elive": 49338, + "elive": 23505, + "elives": 49174, + "elix": 32926, + "elixir": 42887, + "eliz": 42844, + "eliza": 6132, + "eliza": 29992, + "elizabeth": 22397, + "elizabeth": 7026, + "elk": 34013, + "elk": 21896, + "ell": 826, + "ell": 812, + "ella": 20692, + "ella": 2957, + "elland": 43326, + "ellar": 38443, + "ellas": 37053, + "elle": 12818, + "elle": 4765, + "elled": 13146, + "ellen": 14007, + "ellen": 12312, + "ellenshow": 34812, + "eller": 20927, + "eller": 4465, + "ellers": 19010, + "elles": 24431, + "elli": 3367, + "elli": 6673, + "ellic": 38905, + "ellie": 16769, + "ellier": 44054, + "ellin": 40374, + "elling": 2220, + "ellington": 34477, + "ellini": 43256, + "elliot": 20761, + "elliott": 44456, + "elliott": 13788, + "ellip": 44816, + "ellis": 11553, + "ellison": 32295, + "ello": 2512, + "ellor": 14594, + "ells": 2433, + "ellu": 35560, + "elly": 8041, + "elly": 20355, + "elm": 25199, + "elm": 22082, + "elman": 33622, + "elmer": 45958, + "elmo": 32150, + "elo": 6170, + "elo": 13490, + "elon": 26381, + "elon": 20406, + "elondon": 47377, + "elong": 44363, + "elonmusk": 37076, + "elope": 23367, + "eloqu": 37795, + "elos": 44733, + "elot": 43490, + "elove": 43319, + "elove": 19165, + "elover": 21732, + "elovers": 33946, + "els": 35958, + "els": 1645, + "elsa": 22050, + "else": 18857, + "else": 3344, + "elsewhere": 22906, + "elson": 19624, + "elt": 18692, + "elton": 20758, + "elu": 14208, + "elusive": 28903, + "elves": 29111, + "elvi": 47008, + "elvis": 47359, + "elvis": 14498, + "elxn": 37726, + "ely": 12189, + "ely": 1273, + "elyn": 29691, + "elyn": 18126, + 
"em": 908, + "em": 2270, + "ema": 7002, + "ema": 11131, + "emabiggest": 23101, + "emabiggestfans": 29587, + "email": 33537, + "email": 4462, + "emailed": 40470, + "emailmarketing": 40188, + "emails": 12871, + "eman": 24416, + "eman": 36868, + "emancip": 42996, + "emanuel": 35232, + "emb": 3692, + "embar": 8266, + "embaras": 48019, + "embark": 33953, + "embarra": 11382, + "embarrass": 27183, + "embarrassed": 28217, + "embarrassing": 19653, + "embarrassment": 41346, + "embassy": 13598, + "embe": 46041, + "embed": 19703, + "embedded": 22046, + "embelli": 32144, + "embellished": 46992, + "ember": 47049, + "emblem": 21163, + "embo": 23065, + "embr": 35267, + "embrac": 16928, + "embrace": 12118, + "embraced": 35739, + "embraces": 38404, + "embracing": 22196, + "embro": 12550, + "embroi": 18667, + "embroide": 21530, + "embroidered": 22381, + "embroidery": 20823, + "emc": 20897, + "emc": 31602, + "emcee": 42038, + "eme": 22910, + "eme": 21548, + "emea": 40352, + "emed": 11028, + "emen": 22033, + "ement": 40841, + "ement": 2057, + "ements": 11058, + "emer": 3132, + "emer": 25727, + "emerald": 46878, + "emerald": 16980, + "emerge": 22182, + "emerged": 26425, + "emergen": 24096, + "emergence": 39867, + "emergencies": 35759, + "emergency": 44038, + "emergency": 5897, + "emerges": 30801, + "emerging": 38174, + "emerging": 11113, + "emeritus": 35333, + "emerson": 24147, + "emery": 32678, + "emi": 44327, + "emi": 18525, + "emil": 26794, + "emil": 40624, + "emile": 43926, + "emili": 20709, + "emilia": 34238, + "emilio": 39722, + "emily": 14545, + "emily": 7640, + "emin": 17227, + "emin": 23995, + "eminem": 22129, + "eminent": 33779, + "eming": 40398, + "emir": 13337, + "emir": 47613, + "emirates": 47244, + "emirates": 17867, + "emission": 27761, + "emissions": 14172, + "emit": 49043, + "emma": 18177, + "emma": 7445, + "emmanuel": 48045, + "emmanuel": 20411, + "emmett": 45779, + "emmy": 35625, + "emmy": 17089, + "emmys": 21875, + "emo": 3738, + "emo": 19381, + "emoji": 16327, + "emojis": 27870, + "emon": 34406, + "emor": 45034, + "emory": 44274, + "emotion": 17464, + "emotional": 7357, + "emotionally": 24088, + "emotions": 12904, + "emp": 3831, + "emp": 41004, + "empathy": 22420, + "emper": 12522, + "emperor": 13828, + "empha": 16237, + "emphasi": 47176, + "emphasis": 29588, + "empire": 26212, + "empire": 7614, + "empires": 46510, + "emplo": 3409, + "employ": 37290, + "employ": 39626, + "employe": 5037, + "employed": 26567, + "employee": 36631, + "employee": 9560, + "employees": 7377, + "employer": 21296, + "employers": 17647, + "employment": 10959, + "empor": 27386, + "emporium": 48541, + "empower": 13612, + "empower": 17230, + "empowered": 29087, + "empowering": 20086, + "empowerment": 15747, + "empowers": 46206, + "empress": 26656, + "empty": 41203, + "empty": 7893, + "emra": 39259, + "ems": 2858, + "emt": 46360, + "emu": 48149, + "emu": 29296, + "emul": 23272, + "emy": 31076, + "en": 524, + "en": 576, + "ena": 3452, + "enab": 17308, + "enable": 15642, + "enabled": 23666, + "enables": 23417, + "enabling": 23590, + "enam": 41486, + "enamel": 22746, + "enary": 13132, + "enas": 34536, + "enation": 20860, + "enberg": 15658, + "enburg": 28430, + "enc": 33169, + "enca": 37774, + "encan": 30345, + "encapsul": 40874, + "ence": 6495, + "ence": 954, + "enced": 6549, + "ences": 3777, + "enchan": 17290, + "enchanted": 28258, + "enchanting": 32531, + "enchil": 47396, + "enci": 32207, + "encia": 30068, + "encies": 18729, + "encing": 10326, + "enclosed": 43243, + "enclosure": 37419, + "encom": 44026, + "encore": 
20549, + "encoun": 17309, + "encounter": 13164, + "encountered": 32492, + "encounters": 25399, + "encoura": 6169, + "encourage": 12090, + "encouraged": 20299, + "encouragement": 24959, + "encourages": 23848, + "encouraging": 15875, + "encro": 45822, + "encry": 28600, + "encryp": 42928, + "encrypted": 48710, + "encryption": 31423, + "ency": 3484, + "encyclo": 32104, + "encyclopedia": 38376, + "end": 945, + "end": 806, + "enda": 6735, + "endale": 20290, + "endange": 13990, + "endangered": 14931, + "ende": 11373, + "ende": 40306, + "endeav": 18134, + "endeavor": 40502, + "endeavors": 44394, + "endeavour": 38035, + "ended": 2622, + "endemic": 41241, + "endent": 16265, + "ender": 48106, + "ender": 12383, + "enders": 7418, + "endez": 43850, + "endgame": 23042, + "endi": 31359, + "ending": 2695, + "endings": 36516, + "endish": 38841, + "endless": 12688, + "endlessly": 45145, + "endment": 45894, + "endo": 13476, + "endo": 15830, + "endocr": 36486, + "endof": 40786, + "endome": 46996, + "endon": 48018, + "endor": 8092, + "endorf": 37249, + "endorse": 28819, + "endorsed": 24307, + "endorsement": 21205, + "endorses": 34603, + "endorsing": 46779, + "endow": 45895, + "endra": 22321, + "ends": 1339, + "endthe": 46256, + "endu": 26032, + "endur": 19557, + "endurance": 21027, + "endure": 32419, + "enduring": 30851, + "enduro": 47042, + "ene": 3297, + "ene": 6049, + "ened": 2494, + "eneed": 45137, + "enegger": 33235, + "enei": 48906, + "enemies": 15824, + "enemy": 10310, + "enen": 45113, + "ener": 2244, + "ener": 13600, + "energ": 39451, + "energetic": 24197, + "energi": 23044, + "energies": 42374, + "energized": 48635, + "energy": 14974, + "energy": 2650, + "energye": 32271, + "energyefficiency": 40586, + "eners": 48208, + "enes": 42066, + "eness": 11806, + "enet": 46336, + "enew": 29672, + "enews": 13442, + "eney": 20706, + "enez": 33110, + "enf": 38167, + "enfield": 27808, + "enfor": 10592, + "enforce": 40224, + "enforced": 44597, + "enforcement": 12460, + "eng": 1035, + "eng": 6730, + "enga": 22297, + "engag": 6793, + "engage": 11089, + "engaged": 11475, + "engagement": 7281, + "engaging": 13060, + "enge": 26279, + "enge": 2742, + "engel": 38265, + "engen": 48286, + "enger": 6618, + "engers": 7533, + "engine": 3355, + "engine": 5857, + "engineer": 40151, + "engineer": 8517, + "engineered": 26580, + "engineering": 5273, + "engineers": 11494, + "engines": 14487, + "england": 20904, + "england": 3595, + "english": 15942, + "english": 3469, + "engra": 17560, + "engraved": 29421, + "engraving": 33309, + "engul": 43655, + "engv": 28401, + "enh": 7449, + "enhall": 48781, + "enham": 24592, + "enhan": 26827, + "enhance": 13993, + "enhanced": 16070, + "enhancement": 35601, + "enhances": 38259, + "enhancing": 25986, + "eni": 4395, + "eni": 17538, + "enic": 46780, + "enic": 28292, + "enig": 19754, + "enig": 48730, + "enight": 32848, + "enight": 20640, + "enigma": 34998, + "ening": 1133, + "enium": 34380, + "enix": 25720, + "enjo": 1498, + "enjoy": 12981, + "enjoy": 2218, + "enjoyable": 17444, + "enjoyed": 5045, + "enjoying": 3603, + "enjoyment": 34905, + "enjoys": 17024, + "enka": 43942, + "enko": 25312, + "enlar": 38136, + "enligh": 21364, + "enlighten": 28200, + "enlightened": 44032, + "enlightening": 44005, + "enlightenment": 29255, + "enlisted": 43555, + "enly": 43023, + "enn": 43563, + "enna": 8095, + "enne": 21176, + "enne": 11518, + "ennedy": 46266, + "ennes": 43613, + "enni": 7049, + "ennial": 14220, + "ennis": 48923, + "ennis": 26309, + "eno": 9429, + "eno": 12843, + "enoch": 47917, + "enor": 13955, + 
"enormous": 20129, + "enos": 44759, + "enote": 44955, + "enough": 2744, + "enow": 26876, + "enqu": 28417, + "enqui": 22810, + "enquire": 46658, + "enquiries": 31901, + "enquiry": 45141, + "enri": 18915, + "enrich": 20058, + "enrich": 45504, + "enriched": 45166, + "enrichment": 32903, + "enrique": 25489, + "enrol": 44279, + "enroll": 23739, + "enroll": 30366, + "enrolled": 36853, + "enrollment": 24875, + "enroute": 40548, + "ens": 41799, + "ens": 1323, + "ense": 12657, + "ense": 27658, + "ensemble": 14843, + "ensis": 32842, + "ensla": 37535, + "enslaved": 48675, + "ensure": 7492, + "ensures": 29707, + "ensuring": 19403, + "ent": 724, + "ent": 621, + "enta": 17681, + "ental": 32342, + "ental": 6168, + "entary": 9833, + "entation": 37412, + "ente": 17433, + "ente": 9935, + "ented": 3800, + "entennial": 43088, + "enter": 2963, + "enter": 3819, + "entered": 10679, + "entering": 12580, + "enterpri": 7339, + "enterprise": 9220, + "enterprises": 21219, + "enters": 15287, + "entertain": 5566, + "entertain": 23510, + "entertained": 30631, + "entertainer": 28674, + "entertaining": 13897, + "entertainment": 6166, + "entes": 24213, + "enthr": 36202, + "enthusi": 9631, + "enthusiasm": 20525, + "enthusiast": 27153, + "enthusiastic": 22068, + "enthusiasts": 27514, + "enti": 1938, + "ential": 5194, + "entially": 37695, + "entic": 10340, + "entine": 49212, + "enting": 20526, + "entire": 4709, + "entirely": 13911, + "entirety": 43242, + "entit": 15209, + "entities": 38134, + "entitled": 18680, + "entity": 28455, + "ently": 2922, + "ento": 21917, + "ento": 8762, + "entom": 31676, + "entourage": 47893, + "entr": 7129, + "entrance": 9129, + "entrata": 27304, + "entre": 34188, + "entre": 19600, + "entren": 46959, + "entrepre": 4583, + "entreprene": 4789, + "entrepreneu": 26784, + "entrepreneur": 12119, + "entrepreneur": 8033, + "entrepreneurial": 28261, + "entrepreneurs": 11054, + "entrepreneurship": 12858, + "entries": 13766, + "entry": 5362, + "ents": 870, + "entu": 6650, + "enty": 5657, + "enu": 23430, + "env": 32280, + "env": 39207, + "envel": 20052, + "envelope": 27358, + "envir": 3512, + "enviro": 46200, + "environ": 3599, + "environment": 33039, + "environment": 5501, + "environmental": 7831, + "environmentally": 32855, + "environments": 19577, + "envision": 49031, + "envoy": 29263, + "envy": 21017, + "eny": 20482, + "enya": 36509, + "enyc": 39520, + "enz": 25805, + "enz": 31873, + "enza": 25239, + "enzie": 14839, + "enzo": 31543, + "enzyme": 40348, + "enzymes": 47465, + "eo": 16054, + "eo": 11712, + "eoin": 48634, + "eon": 31915, + "eos": 17805, + "ep": 1178, + "ep": 1117, + "epa": 15866, + "epage": 26931, + "epaper": 33584, + "epcot": 32524, + "eper": 43071, + "eph": 45752, + "eph": 41240, + "ephe": 25129, + "epi": 7219, + "epi": 34641, + "epic": 12683, + "epic": 4991, + "epiconetsy": 49222, + "epide": 17382, + "epidemi": 44447, + "epidemic": 21522, + "epile": 23150, + "epilepsy": 29547, + "epilo": 31291, + "epilots": 39766, + "epiph": 40561, + "epiphany": 43251, + "epis": 24616, + "episcop": 28037, + "episcopal": 31221, + "episo": 2708, + "episode": 2965, + "episodes": 11837, + "epit": 21967, + "epitome": 35114, + "epl": 25950, + "epo": 25810, + "epp": 39054, + "epp": 39593, + "eps": 4090, + "epsilon": 40019, + "epsom": 40364, + "epstein": 34688, + "eq": 39331, + "eq": 33692, + "equ": 2563, + "equal": 17373, + "equal": 10433, + "equality": 48981, + "equality": 9578, + "equally": 18172, + "equals": 30278, + "equation": 28591, + "equations": 38225, + "eque": 19518, + "equestrian": 24728, + "equi": 8752, 
+ "equili": 43262, + "equine": 33801, + "equinox": 32652, + "equip": 6526, + "equip": 36979, + "equipment": 6893, + "equipo": 45688, + "equipped": 18331, + "equitable": 44717, + "equities": 44015, + "equity": 11293, + "equivalent": 19489, + "er": 517, + "er": 528, + "era": 30548, + "era": 2072, + "erable": 18801, + "erad": 24194, + "eradic": 36346, + "eradicate": 46164, + "eral": 6222, + "eran": 13069, + "eras": 19325, + "eras": 39090, + "erase": 33893, + "erased": 46762, + "erasmus": 38935, + "erc": 5360, + "erc": 32382, + "erd": 25645, + "erdo": 21112, + "erdogan": 24453, + "ere": 17907, + "ere": 642, + "erec": 21526, + "erected": 39365, + "ered": 9097, + "eres": 15751, + "ergon": 38120, + "ergy": 19550, + "eri": 2769, + "eri": 9509, + "eria": 11634, + "erial": 5409, + "eric": 1206, + "eric": 5396, + "erica": 13208, + "erich": 26070, + "erick": 27434, + "erick": 36959, + "erickson": 45286, + "ericsson": 39645, + "eridge": 45408, + "erie": 7005, + "eries": 9099, + "erik": 22805, + "erik": 16532, + "erika": 25531, + "erin": 17532, + "erin": 11333, + "erina": 25176, + "ering": 1785, + "erit": 23335, + "eritrea": 30738, + "erjee": 41665, + "erly": 14380, + "erm": 31649, + "erman": 17990, + "ern": 6992, + "ern": 12140, + "ernal": 20868, + "ernan": 34617, + "ernation": 48796, + "erne": 33930, + "ernest": 23006, + "ernie": 23636, + "ernity": 14653, + "erno": 40812, + "ernst": 30099, + "ero": 3211, + "ero": 3732, + "erock": 38206, + "eron": 32837, + "eroom": 46690, + "eros": 30597, + "erose": 48657, + "erosion": 30174, + "erotic": 30708, + "erotica": 39126, + "erous": 6384, + "eroy": 36461, + "erp": 28268, + "err": 22479, + "err": 25346, + "erra": 48446, + "errands": 45485, + "error": 12097, + "errors": 21195, + "erry": 45236, + "erry": 24124, + "ers": 4840, + "ers": 612, + "ersfc": 37925, + "ership": 2884, + "erson": 25780, + "erson": 6811, + "ert": 40325, + "ert": 3112, + "erta": 32007, + "erton": 26245, + "erts": 12921, + "eru": 36068, + "erun": 41642, + "erup": 17093, + "erupted": 48862, + "eruption": 33705, + "erville": 37557, + "erwin": 43724, + "ery": 12467, + "ery": 1692, + "erz": 38711, + "es": 957, + "es": 542, + "esa": 46834, + "esa": 12489, + "esanders": 23099, + "esc": 3330, + "esc": 28420, + "escal": 15902, + "escap": 11499, + "escape": 32484, + "escape": 7568, + "escaped": 18707, + "escapes": 29916, + "escaping": 21767, + "escar": 39229, + "escence": 37972, + "esch": 46760, + "esch": 41945, + "esco": 32482, + "escobar": 48807, + "escor": 24360, + "escort": 24976, + "escorted": 47667, + "escorts": 48574, + "escu": 36517, + "esday": 19553, + "ese": 18766, + "ese": 2260, + "esg": 41674, + "esh": 17119, + "esh": 13407, + "esha": 28799, + "eshop": 38451, + "eshop": 45570, + "eshopsuk": 39349, + "esi": 30064, + "esis": 12414, + "esk": 19359, + "esl": 26201, + "eso": 29890, + "eso": 28921, + "esof": 17047, + "eson": 46845, + "esp": 3849, + "esp": 13870, + "espa": 37301, + "espan": 41731, + "españa": 41118, + "especially": 4878, + "esper": 29216, + "espino": 46633, + "espionage": 43498, + "espn": 22917, + "espn": 7540, + "espnu": 47747, + "espo": 34381, + "esports": 16035, + "espresso": 17098, + "esq": 47352, + "esqu": 34616, + "esque": 25877, + "ess": 3118, + "ess": 9764, + "essa": 39125, + "essay": 12751, + "essays": 27328, + "esse": 22305, + "essen": 30489, + "essence": 17830, + "essenti": 11163, + "essential": 47264, + "essential": 6895, + "essentially": 30042, + "essentials": 16191, + "essex": 30563, + "essex": 11623, + "est": 2291, + "est": 1509, + "esta": 41449, + "esta": 10135, + 
"estab": 7010, + "establi": 8412, + "establish": 19709, + "established": 13143, + "establishing": 29420, + "establishment": 20213, + "estas": 39072, + "estate": 47130, + "estate": 6159, + "estates": 26054, + "este": 12968, + "este": 20579, + "esteban": 48381, + "esteem": 31541, + "esteemed": 36293, + "ester": 45808, + "esthe": 18468, + "esther": 24393, + "estim": 8904, + "estimate": 21883, + "estimated": 16665, + "estimates": 21957, + "esto": 31589, + "esto": 23958, + "estonia": 26260, + "estonian": 48895, + "estrada": 48116, + "estre": 31271, + "estu": 26272, + "estuary": 35269, + "esur": 35758, + "esville": 39187, + "esy": 46268, + "et": 1169, + "et": 875, + "eta": 8761, + "etal": 25221, + "etary": 13074, + "etc": 5353, + "etched": 40411, + "etching": 41375, + "ete": 38820, + "ete": 40245, + "eter": 8587, + "eter": 17007, + "eternal": 13732, + "eternally": 48486, + "eternity": 23832, + "eters": 18392, + "etf": 31661, + "eth": 4819, + "eth": 5927, + "ethan": 24245, + "ethan": 15958, + "ethanol": 38166, + "ethe": 21312, + "ethel": 45921, + "ether": 23349, + "ethere": 18705, + "ethereal": 40925, + "ethereum": 19612, + "ethernet": 35026, + "ethi": 10327, + "ethic": 39104, + "ethical": 47041, + "ethical": 17679, + "ethics": 13355, + "ethiop": 10897, + "ethiopia": 13920, + "ethiopian": 24507, + "ethnic": 30522, + "ethnic": 16344, + "ethnicity": 46787, + "ethno": 34225, + "ethos": 48768, + "eti": 11188, + "eti": 30394, + "etienne": 46118, + "eties": 15137, + "etihad": 38489, + "etiquette": 37957, + "etis": 38216, + "etisation": 39733, + "etna": 41940, + "eto": 27829, + "eto": 33837, + "eton": 44339, + "etour": 41462, + "etr": 23012, + "etres": 42838, + "ets": 3442, + "etsy": 13237, + "etsy": 6282, + "etsym": 22902, + "etsymntt": 25416, + "etsyshop": 44643, + "ett": 32729, + "ett": 24998, + "etta": 30466, + "ette": 19981, + "ette": 5212, + "ettes": 35326, + "etto": 44219, + "etty": 40759, + "etu": 36593, + "etv": 49155, + "etv": 20325, + "etwork": 20585, + "ety": 25920, + "ety": 2746, + "etz": 36181, + "etz": 25301, + "eu": 1506, + "eu": 3238, + "eucalyp": 41068, + "eucalyptus": 42351, + "euchar": 38362, + "eugen": 30678, + "eugene": 17760, + "eul": 46749, + "eun": 16431, + "eun": 26219, + "eunhyuk": 47526, + "eup": 44435, + "euph": 21386, + "euphoria": 41051, + "eur": 18343, + "eur": 12018, + "eura": 32605, + "eure": 25311, + "euref": 48017, + "eureka": 31686, + "euro": 2039, + "euro": 8463, + "euroleague": 46821, + "europa": 18290, + "europale": 42473, + "europaleague": 44029, + "europarl": 44922, + "europe": 4198, + "europe": 3848, + "european": 26712, + "european": 4759, + "europeans": 37082, + "euros": 22274, + "eurovision": 17593, + "eurozone": 42555, + "eurusd": 40895, + "eus": 44214, + "euston": 46905, + "euthan": 43280, + "euve": 40652, + "eux": 25019, + "ev": 776, + "ev": 10133, + "eva": 6845, + "evacu": 13187, + "evacuated": 26806, + "evacuation": 27353, + "eval": 25139, + "eval": 9703, + "evalu": 10314, + "evaluate": 27174, + "evaluating": 34541, + "evaluation": 17640, + "evan": 12821, + "evan": 12847, + "evangel": 20518, + "evangeli": 21372, + "evangelical": 36151, + "evangelist": 42275, + "evankirstel": 46581, + "evans": 8836, + "evansville": 44782, + "evapor": 33352, + "evasion": 48795, + "eve": 5732, + "eve": 1866, + "eved": 19820, + "evel": 39315, + "evelyn": 26687, + "evement": 8210, + "even": 6359, + "even": 1427, + "evening": 34487, + "evening": 2285, + "evenings": 19994, + "evenly": 45974, + "event": 10612, + "event": 1655, + "eventful": 45628, + "evento": 38155, + 
"eventprofs": 24980, + "events": 3667, + "eventu": 14055, + "eventual": 45321, + "eventually": 14397, + "ever": 888, + "ever": 1247, + "everest": 21722, + "everett": 25456, + "everglades": 46294, + "evergreen": 23852, + "everlasting": 32849, + "evers": 31914, + "everton": 13315, + "every": 1091, + "every": 1505, + "everybody": 5901, + "everyday": 25049, + "everyday": 5160, + "everyone": 1584, + "everything": 36376, + "everything": 2410, + "everytime": 16911, + "everywhere": 6364, + "eves": 7323, + "evi": 5348, + "evi": 36989, + "evic": 21336, + "eviction": 37111, + "eviden": 46220, + "evidence": 6439, + "evident": 34529, + "evie": 47195, + "evil": 23218, + "evil": 6006, + "eville": 16143, + "eving": 24729, + "evo": 17962, + "evo": 13169, + "evoc": 43133, + "evol": 5350, + "evolu": 7725, + "evolution": 8902, + "evolutionary": 30629, + "evolve": 23406, + "evolved": 22613, + "evolving": 23675, + "evp": 46154, + "evs": 33576, + "ew": 11942, + "ew": 15428, + "ewan": 40247, + "ewe": 48438, + "ewing": 38873, + "ews": 9878, + "ex": 659, + "ex": 4118, + "exac": 5460, + "exact": 12651, + "exactly": 5840, + "exagger": 29766, + "exal": 49324, + "exam": 4428, + "exam": 8785, + "examination": 20970, + "examine": 25728, + "examined": 44004, + "examiner": 29149, + "examines": 28160, + "examining": 30616, + "example": 6228, + "examples": 14790, + "exams": 14028, + "exas": 47536, + "exc": 1302, + "excav": 20733, + "excavation": 45909, + "exce": 10999, + "exceed": 32521, + "exceeded": 36221, + "exceeding": 47213, + "exceeds": 49353, + "excel": 28351, + "excel": 18754, + "excell": 3298, + "excellence": 8171, + "excellency": 36503, + "excellent": 4239, + "excelsi": 47315, + "excep": 8882, + "except": 8541, + "exception": 25018, + "exceptional": 13425, + "exceptionally": 29306, + "excer": 17737, + "excerpt": 20586, + "excess": 22491, + "excessive": 21332, + "exchange": 6616, + "exchanged": 48919, + "exchanges": 29730, + "exchanging": 47760, + "excit": 10510, + "excite": 47711, + "excited": 1889, + "excitement": 11407, + "exciting": 4300, + "exclu": 3114, + "exclude": 49235, + "excluded": 46216, + "excluding": 44326, + "exclusion": 40219, + "exclusive": 3747, + "exclusively": 13565, + "exclusives": 47149, + "excu": 7324, + "excur": 27533, + "excursion": 34869, + "excuse": 9266, + "excuses": 19388, + "exe": 3554, + "exe": 48027, + "exec": 15052, + "execs": 35728, + "execu": 4360, + "execute": 36405, + "executed": 20432, + "execution": 18085, + "executive": 5944, + "executives": 24357, + "exem": 19753, + "exemp": 28602, + "exempl": 36371, + "exemplary": 39123, + "exempli": 41934, + "exempt": 44278, + "exemption": 47481, + "exer": 40295, + "exerc": 5932, + "exercise": 7016, + "exercises": 19669, + "exercising": 39036, + "exeter": 32137, + "exeter": 18837, + "exfoli": 38823, + "exhau": 11154, + "exhaust": 21812, + "exhausted": 21741, + "exhausting": 40035, + "exhaustion": 49221, + "exhi": 3022, + "exhib": 3783, + "exhibit": 24992, + "exhibit": 8209, + "exhibiting": 23889, + "exhibition": 4219, + "exhibitions": 28311, + "exhibitor": 44192, + "exhibitors": 38542, + "exhibits": 30093, + "exhilar": 40262, + "exhilarating": 49289, + "exi": 5297, + "exico": 38712, + "exile": 28566, + "exist": 10899, + "exist": 9645, + "existed": 23198, + "existence": 13832, + "existent": 43541, + "existential": 38752, + "existing": 12886, + "exists": 14608, + "exit": 9374, + "exited": 37581, + "exiting": 39577, + "exits": 34943, + "exmoor": 48260, + "exo": 15600, + "exo": 5842, + "exodus": 30098, + "exol": 42856, + "exop": 35288, + 
"exoplan": 37980, + "exor": 24506, + "exorcist": 46309, + "exotic": 15639, + "exp": 9923, + "exp": 19066, + "expan": 7512, + "expand": 10382, + "expand": 13141, + "expanded": 18390, + "expanding": 15755, + "expands": 22223, + "expanse": 46886, + "expansion": 10138, + "expansive": 49261, + "expat": 43900, + "expe": 2560, + "expect": 9802, + "expect": 5716, + "expectation": 34273, + "expectations": 12529, + "expected": 5573, + "expecting": 12525, + "expects": 24536, + "expedition": 16761, + "expeditions": 49327, + "expelled": 48834, + "expen": 7216, + "expend": 29302, + "expenditure": 47044, + "expense": 28473, + "expenses": 21797, + "expensive": 9649, + "exper": 1533, + "experi": 4723, + "experience": 31867, + "experience": 2415, + "experienced": 10417, + "experiences": 8233, + "experiencing": 16643, + "experiential": 44952, + "experim": 6697, + "experiment": 13079, + "experimental": 16539, + "experimenting": 28263, + "experiments": 21077, + "expert": 6284, + "expertise": 16555, + "experts": 6960, + "expi": 26850, + "expir": 35077, + "expire": 49315, + "expired": 30200, + "expires": 34739, + "expl": 3261, + "expla": 3517, + "explain": 48918, + "explain": 7304, + "explained": 14229, + "explaining": 13136, + "explains": 6655, + "explan": 13294, + "explanation": 16577, + "explanations": 34383, + "explic": 21011, + "explicit": 33228, + "explo": 3586, + "explode": 31262, + "exploded": 28947, + "explodes": 38119, + "exploding": 34683, + "exploit": 36953, + "exploited": 48554, + "explor": 11958, + "exploration": 14043, + "explore": 10405, + "explore": 5147, + "explorebc": 38754, + "explorecanada": 36600, + "explored": 25016, + "explorer": 15776, + "explorers": 28491, + "explores": 13996, + "exploring": 7584, + "explosion": 13785, + "explosions": 38646, + "explosive": 18888, + "explosives": 44705, + "expo": 7820, + "expo": 6344, + "expon": 27905, + "export": 14444, + "exporting": 47433, + "exports": 20088, + "expose": 23181, + "exposed": 12180, + "exposes": 33575, + "exposing": 28362, + "exposition": 36943, + "exposure": 11903, + "expre": 6085, + "express": 18553, + "express": 5642, + "expressed": 20777, + "expresses": 31931, + "expressing": 30207, + "expression": 11357, + "expressions": 20314, + "expressive": 42060, + "expressway": 31658, + "exquis": 16575, + "exquisite": 17958, + "ext": 5711, + "ext": 20072, + "exten": 5555, + "extend": 14492, + "extended": 9614, + "extending": 25652, + "extends": 20688, + "extension": 10275, + "extensions": 24525, + "extensive": 16870, + "extensively": 47365, + "extent": 24913, + "exter": 9797, + "exterior": 19352, + "extermin": 41671, + "external": 15028, + "extin": 13553, + "extinct": 24488, + "extinction": 21186, + "extingui": 38567, + "extor": 35620, + "extr": 29082, + "extra": 6416, + "extra": 4231, + "extrac": 18550, + "extract": 18962, + "extraction": 28789, + "extracts": 45576, + "extraordin": 23628, + "extraordinaire": 30909, + "extraordinary": 10982, + "extras": 29817, + "extravag": 22299, + "extravaganza": 29461, + "extre": 3978, + "extreme": 38357, + "extreme": 8331, + "extremely": 6519, + "extremism": 31493, + "extremist": 36383, + "extremists": 41425, + "extru": 43010, + "ey": 1541, + "ey": 1477, + "eyang": 28915, + "eye": 5034, + "eye": 3272, + "eyebrow": 34250, + "eyebrows": 19923, + "eyed": 15512, + "eyeing": 34916, + "eyel": 17075, + "eyelashes": 42074, + "eyeliner": 33354, + "eyeon": 25126, + "eyes": 3095, + "eyeshadow": 35213, + "eyewear": 30165, + "eyewitness": 36258, + "eyou": 31996, + "eyour": 40229, + "eyre": 44115, + "ez": 10082, + "ez": 
8387, + "eze": 25993, + "eze": 27229, + "ezekiel": 41428, + "ezra": 27552, + "f": 69, + "f": 325, + "fa": 778, + "fa": 2800, + "faa": 27577, + "fab": 2833, + "fab": 5492, + "faber": 43461, + "faber": 42488, + "fabi": 29425, + "fabian": 34539, + "fabio": 31666, + "fabric": 16217, + "fabric": 10033, + "fabricated": 40851, + "fabrication": 33476, + "fabrics": 23159, + "fabulous": 5189, + "fac": 1053, + "fac": 35438, + "facade": 29217, + "face": 2545, + "face": 1710, + "facebook": 36156, + "facebook": 2943, + "faced": 10941, + "faceli": 32023, + "facelift": 36380, + "faceoff": 42710, + "facep": 45285, + "faces": 4905, + "faceted": 43435, + "facetime": 24076, + "facial": 11909, + "facil": 39973, + "facilit": 13567, + "facilitate": 26733, + "facilitated": 43853, + "facilitating": 34796, + "facilities": 10388, + "facility": 8165, + "facing": 7619, + "fact": 17189, + "fact": 3598, + "factfriday": 27953, + "faction": 14629, + "factor": 21082, + "factor": 8124, + "factories": 36492, + "factors": 12733, + "factory": 42483, + "factory": 6072, + "facts": 5085, + "factual": 45471, + "faculty": 9504, + "facup": 25283, + "fad": 12632, + "fad": 47669, + "fade": 20486, + "faded": 26051, + "fades": 40441, + "fading": 32882, + "fadnavis": 38945, + "faf": 31052, + "faf": 43903, + "fag": 25617, + "fag": 39305, + "fah": 25495, + "fah": 35429, + "fahren": 45527, + "fai": 20519, + "fai": 26384, + "fail": 7105, + "fail": 6801, + "failed": 8314, + "failing": 15757, + "fails": 13388, + "failure": 8732, + "failures": 25442, + "faint": 30807, + "fair": 3031, + "fair": 2849, + "fairbanks": 43962, + "faire": 34745, + "faire": 20798, + "fairfax": 29368, + "fairfield": 29664, + "fairgrounds": 38325, + "fairi": 28884, + "fairies": 33590, + "fairly": 14961, + "fairmont": 41547, + "fairness": 29388, + "fairs": 8655, + "fairtrade": 33361, + "fairview": 43479, + "fairway": 44022, + "fairy": 17021, + "fairy": 10444, + "fairytale": 28944, + "fais": 23542, + "faisal": 35459, + "fait": 20567, + "faith": 10653, + "faith": 5080, + "faithful": 15511, + "faiz": 41775, + "fake": 18794, + "fake": 5777, + "faken": 22853, + "fakenews": 26943, + "fakespeare": 49095, + "fal": 2778, + "fal": 40494, + "fala": 47120, + "falcon": 22498, + "falcon": 13571, + "falcons": 13834, + "falk": 34648, + "falkirk": 44080, + "fall": 6489, + "fall": 2359, + "fallen": 8688, + "falling": 48709, + "falling": 7293, + "fallon": 39596, + "fallon": 21281, + "fallontonight": 44627, + "fallout": 49365, + "fallout": 16009, + "falls": 4778, + "falmouth": 38261, + "false": 38948, + "false": 9078, + "falsely": 42321, + "fam": 1058, + "fam": 5128, + "fame": 6573, + "famed": 23302, + "famer": 24554, + "famil": 3395, + "famili": 8488, + "familia": 25622, + "familiar": 10020, + "families": 4612, + "family": 8137, + "family": 1315, + "familyfun": 46308, + "familytime": 47236, + "familytravel": 38222, + "famine": 35847, + "famous": 44811, + "famous": 4096, + "famously": 44505, + "fan": 1675, + "fan": 2261, + "fanart": 41059, + "fanart": 7855, + "fanartfriday": 45346, + "fanatic": 36643, + "fanatics": 39610, + "fanbase": 36921, + "fanboy": 43369, + "fanc": 29017, + "fancafe": 45080, + "fanci": 35908, + "fanclub": 31530, + "fancy": 47622, + "fancy": 6733, + "fand": 19684, + "fandom": 47634, + "fandom": 11534, + "fanfest": 42916, + "fanfic": 47243, + "fang": 14269, + "fang": 27428, + "fangirl": 28813, + "fangirling": 39463, + "fanning": 37282, + "fanny": 30401, + "fans": 32454, + "fans": 1840, + "fansign": 25288, + "fant": 4467, + "fanta": 2703, + "fantaken": 39412, + "fantasia": 
49306, + "fantastic": 31289, + "fantastic": 2935, + "fantasy": 15124, + "fantasy": 5267, + "fantasyfootball": 35713, + "fao": 31155, + "faq": 28533, + "far": 1578, + "far": 2384, + "fara": 48562, + "farage": 28340, + "farah": 31547, + "fare": 8620, + "fare": 6461, + "fares": 27525, + "farewell": 10734, + "fargo": 18870, + "fari": 26197, + "farley": 43761, + "farm": 9066, + "farm": 3985, + "farmer": 19735, + "farmer": 10474, + "farmers": 29752, + "farmers": 6402, + "farmersmarket": 41808, + "farmhouse": 26293, + "farming": 10399, + "farmington": 49305, + "farmland": 45258, + "farms": 11277, + "farn": 27527, + "faroo": 39147, + "farra": 33657, + "farrakhan": 46293, + "farrell": 24234, + "fart": 34664, + "farther": 42233, + "fas": 4830, + "fas": 42995, + "fasci": 17191, + "fascin": 7327, + "fascinated": 32964, + "fascinating": 8640, + "fascism": 28213, + "fascist": 23870, + "fascists": 43598, + "fash": 42682, + "fashi": 2099, + "fashion": 6976, + "fashion": 2444, + "fashionable": 24597, + "fashionblogger": 31726, + "fashioned": 21563, + "fashioni": 26062, + "fashionista": 30415, + "fashions": 37601, + "fashionshow": 45653, + "fashionweek": 28684, + "fass": 42398, + "fast": 8509, + "fast": 1953, + "fasten": 44990, + "faster": 8835, + "fastest": 9808, + "fasting": 24656, + "fat": 4751, + "fat": 5484, + "fatal": 12124, + "fatalities": 44168, + "fatally": 34069, + "fate": 26315, + "fate": 11734, + "father": 11607, + "father": 3224, + "fathers": 12780, + "fathersday": 16731, + "fati": 13430, + "fatigue": 23747, + "fatima": 28202, + "fats": 30151, + "fatt": 44131, + "fatty": 22953, + "fau": 5571, + "fau": 31381, + "faucet": 44273, + "faul": 16230, + "faulkner": 37840, + "fault": 13862, + "faults": 42752, + "faulty": 47103, + "fauna": 30808, + "faust": 44772, + "faux": 19429, + "fav": 1355, + "fav": 5426, + "fave": 7272, + "faves": 18003, + "favor": 1766, + "favor": 12160, + "favorable": 35392, + "favored": 46640, + "favorite": 35262, + "favorite": 1916, + "favorited": 36926, + "favorites": 10564, + "favors": 36085, + "favour": 3111, + "favour": 20469, + "favourite": 3342, + "favourites": 16585, + "favs": 18879, + "faw": 21800, + "fawad": 46425, + "fawn": 48624, + "fax": 32535, + "fax": 9337, + "fay": 8939, + "fay": 40074, + "faye": 30257, + "fayette": 32043, + "fayette": 19782, + "fayetteville": 37771, + "fayre": 34982, + "faz": 26238, + "faze": 44880, + "fb": 22637, + "fb": 3307, + "fball": 29663, + "fbf": 20004, + "fbi": 10293, + "fbloggers": 41389, + "fbs": 48454, + "fc": 4278, + "fc": 1399, + "fca": 24540, + "fcb": 26639, + "fcb": 25045, + "fcbarcelona": 32174, + "fcbayern": 35033, + "fcblive": 44608, + "fcc": 21240, + "fck": 40080, + "fck": 49263, + "fcofficial": 27805, + "fcs": 32095, + "fcu": 47898, + "fd": 16972, + "fd": 11525, + "fda": 17823, + "fdi": 45579, + "fdn": 18563, + "fdny": 41084, + "fdr": 42298, + "fe": 623, + "fe": 873, + "fear": 8744, + "fear": 5402, + "feared": 31154, + "fearless": 17470, + "fears": 13867, + "fearthe": 33449, + "feasi": 34977, + "feast": 37963, + "feast": 9564, + "feat": 1703, + "feat": 5611, + "feather": 24905, + "feather": 17871, + "feathers": 21138, + "featherweight": 44939, + "feature": 30413, + "feature": 4527, + "featured": 4743, + "features": 4643, + "featuring": 3706, + "feb": 4317, + "febru": 4202, + "february": 4248, + "fect": 31293, + "fed": 22518, + "fed": 7035, + "feder": 4737, + "federal": 6369, + "federation": 15530, + "federer": 18246, + "federico": 40539, + "fedex": 32603, + "fedora": 45111, + "feds": 30593, + "fee": 28242, + "fee": 9224, + 
"feed": 6662, + "feed": 5839, + "feedback": 8683, + "feeder": 24482, + "feeders": 44523, + "feeding": 9879, + "feeds": 21788, + "feel": 2408, + "feel": 2051, + "feelin": 19903, + "feeling": 33087, + "feeling": 3045, + "feelings": 9452, + "feels": 4808, + "feelthe": 22322, + "feelthebern": 27743, + "fees": 11765, + "feet": 4804, + "fei": 23441, + "fei": 34217, + "fein": 46707, + "feinstein": 41313, + "fel": 2081, + "fel": 20304, + "feld": 45913, + "feld": 14219, + "feldman": 41942, + "feli": 7498, + "felic": 25845, + "felici": 23379, + "felicia": 41139, + "felicidades": 41648, + "felicity": 35123, + "feline": 29471, + "felipe": 27681, + "felix": 33455, + "felix": 16514, + "feliz": 26104, + "feliz": 20221, + "fell": 33540, + "fell": 6266, + "fella": 17586, + "fellas": 18787, + "feller": 29226, + "fellow": 12099, + "fellow": 5242, + "fellows": 15766, + "fellowship": 13857, + "felony": 31068, + "felt": 5413, + "fem": 24574, + "fem": 36615, + "fema": 41721, + "female": 22062, + "female": 3970, + "females": 21028, + "femi": 38607, + "femin": 11423, + "femini": 11894, + "feminine": 24911, + "feminism": 18784, + "feminist": 14921, + "feminists": 38809, + "femme": 31331, + "fen": 5509, + "fen": 25024, + "fence": 12679, + "fences": 34312, + "fencing": 23489, + "fender": 17117, + "fener": 41208, + "fenerbah": 46652, + "feng": 33291, + "fennel": 28689, + "fent": 26395, + "fenton": 47265, + "fenway": 29206, + "fer": 1765, + "fer": 2897, + "fera": 37705, + "feral": 29972, + "ferdin": 25541, + "ferdinand": 27591, + "fere": 43144, + "feren": 35652, + "ference": 19984, + "ferg": 44938, + "fergie": 39119, + "fergu": 10988, + "fergus": 42041, + "ferguson": 11904, + "fermentation": 45817, + "fermented": 36886, + "fern": 10747, + "fern": 21685, + "fernandes": 44391, + "fernandez": 23436, + "fernando": 17140, + "ferns": 38277, + "feroci": 45652, + "ferr": 7256, + "ferra": 47911, + "ferrari": 9606, + "ferre": 29626, + "ferred": 10432, + "ferreira": 48686, + "ferrell": 41112, + "ferrer": 38904, + "ferri": 42008, + "ferries": 28489, + "ferris": 27532, + "ferry": 38936, + "ferry": 10278, + "fers": 12378, + "fert": 14925, + "fert": 43662, + "fertil": 41987, + "fertile": 44837, + "fertili": 23912, + "fertility": 23528, + "fertilizer": 36786, + "fery": 47448, + "fes": 32300, + "fest": 17383, + "fest": 2590, + "festa": 42124, + "festi": 1943, + "festiv": 19222, + "festival": 20946, + "festival": 2240, + "festivals": 17834, + "festive": 9533, + "festivities": 21020, + "fet": 21409, + "feta": 31705, + "fetal": 42031, + "fetch": 30271, + "fete": 34629, + "fett": 37979, + "fetus": 26768, + "feu": 24912, + "feu": 32990, + "feud": 27365, + "fever": 40896, + "fever": 9989, + "fevre": 43861, + "few": 1939, + "fewer": 19128, + "fex": 41584, + "fex": 26392, + "fey": 39069, + "fey": 23298, + "fez": 43081, + "ff": 1021, + "ff": 1304, + "ffa": 15355, + "ffame": 42873, + "ffc": 19832, + "ffe": 1138, + "ffe": 8631, + "ffect": 29151, + "ffed": 8448, + "ffee": 26377, + "ffel": 22656, + "ffen": 46537, + "ffer": 27369, + "ffer": 11636, + "ffers": 32163, + "fferty": 44771, + "ffes": 46441, + "ffey": 30138, + "fff": 28106, + "ffi": 19961, + "ffic": 4762, + "ffice": 26044, + "ffici": 3639, + "fficial": 39818, + "fficial": 6463, + "fficiency": 27800, + "fficient": 20424, + "ffin": 12779, + "ffin": 7367, + "ffing": 16592, + "ffins": 17898, + "ffl": 39490, + "ffle": 7749, + "ffler": 39819, + "ffles": 19344, + "ffman": 15823, + "ffo": 42264, + "ffs": 4424, + "ffxiv": 26569, + "ffxv": 46786, + "ffy": 26404, + "ffy": 7795, + "fg": 45977, + "fg": 
6823, + "fgm": 32178, + "fgo": 46113, + "fh": 21649, + "fh": 21010, + "fhs": 45094, + "fi": 701, + "fi": 3589, + "fia": 8827, + "fiable": 34373, + "fianc": 27752, + "fiance": 44114, + "fiancé": 34039, + "fiasco": 40944, + "fiat": 16740, + "fiawec": 39485, + "fib": 40594, + "fiba": 34993, + "fiber": 35074, + "fiber": 12612, + "fibers": 44587, + "fibre": 21401, + "fibro": 21294, + "fibrosis": 36307, + "fic": 1788, + "fic": 2059, + "fica": 26952, + "fically": 14854, + "fication": 4523, + "fications": 12512, + "ficial": 48192, + "fics": 42505, + "fiction": 6218, + "fictional": 25570, + "fid": 34197, + "fid": 23966, + "fidd": 25218, + "fiddle": 35968, + "fide": 45375, + "fidel": 21740, + "fidel": 36837, + "fidelity": 30109, + "fidget": 48664, + "fie": 28487, + "fie": 10348, + "fied": 29642, + "fied": 2853, + "fiel": 1361, + "field": 7571, + "field": 1570, + "fielder": 11046, + "fieldhouse": 37969, + "fielding": 30465, + "fields": 6494, + "fieldwork": 33155, + "fiends": 37869, + "fier": 11167, + "fier": 10598, + "fierc": 48609, + "fierce": 13896, + "fiercely": 49039, + "fiers": 16113, + "fiery": 24557, + "fies": 9537, + "fiesta": 14580, + "fif": 5309, + "fifa": 21976, + "fifa": 8516, + "fifaworldcup": 38819, + "fifawwc": 41329, + "fife": 24374, + "fifteen": 29504, + "fifth": 25515, + "fifth": 8772, + "fifthharmony": 31075, + "fifty": 24456, + "fifty": 15978, + "fig": 4814, + "fig": 20719, + "figaro": 48044, + "figh": 23274, + "fight": 5262, + "fight": 2757, + "fighter": 35884, + "fighter": 6438, + "fighters": 7371, + "fightfor": 48909, + "fightfor": 35740, + "fighting": 38625, + "fighting": 4652, + "fighton": 45578, + "fights": 12132, + "figs": 38882, + "figu": 6390, + "figur": 16948, + "figurative": 44042, + "figure": 48820, + "figure": 5274, + "figured": 15630, + "figures": 8739, + "figurine": 33306, + "figuring": 31513, + "fiji": 48270, + "fiji": 18285, + "fik": 46589, + "fil": 1142, + "fil": 14915, + "fila": 30992, + "filament": 49252, + "file": 12545, + "file": 4512, + "filed": 13864, + "files": 7850, + "filet": 43155, + "fili": 9590, + "filing": 16576, + "filip": 14368, + "filipino": 19153, + "fill": 15904, + "fill": 6277, + "filled": 5589, + "filler": 32816, + "fillers": 45005, + "fillet": 39276, + "filling": 9736, + "fillion": 38048, + "fillmore": 43922, + "fills": 21750, + "filly": 27690, + "film": 5117, + "film": 1860, + "filmed": 15801, + "filmfare": 42224, + "filmfest": 24508, + "filmfestival": 28066, + "filming": 6866, + "filmmaker": 17202, + "filmmakers": 24896, + "filmmaking": 18226, + "films": 5370, + "fils": 40271, + "filter": 7541, + "filtered": 29926, + "filtering": 47770, + "filters": 18385, + "filth": 39713, + "filthy": 26899, + "filtr": 21408, + "filtration": 42036, + "fim": 47525, + "fin": 735, + "fin": 10663, + "fina": 34497, + "final": 11968, + "final": 1755, + "finale": 7844, + "finalfantasy": 44543, + "finalfour": 46999, + "finalist": 12620, + "finalists": 13422, + "finalized": 48930, + "finally": 1992, + "finals": 4536, + "finan": 4807, + "finance": 6117, + "finances": 28767, + "financi": 12846, + "financial": 19783, + "financial": 4930, + "financially": 28124, + "financing": 18375, + "finch": 18523, + "find": 18638, + "find": 1416, + "finder": 15045, + "finders": 43884, + "findia": 47064, + "finding": 37455, + "finding": 6002, + "findings": 16529, + "findlay": 48227, + "findom": 36463, + "finds": 6680, + "findyour": 25936, + "findyourpark": 38924, + "fine": 12042, + "fine": 3797, + "fineart": 7484, + "fineart": 16005, + "fineartamerica": 7724, + "fined": 20094, + 
"finely": 46120, + "finer": 36681, + "fines": 25053, + "finesse": 46047, + "finest": 7707, + "fing": 6485, + "fing": 17955, + "finger": 13480, + "finger": 8895, + "fingerprint": 39579, + "fingers": 9690, + "fini": 2405, + "finish": 42178, + "finish": 3958, + "finished": 3078, + "finisher": 38636, + "finishers": 48661, + "finishes": 13078, + "finishing": 7912, + "finite": 48312, + "finity": 41463, + "finity": 21273, + "fink": 40158, + "finland": 10775, + "finley": 41652, + "finn": 28479, + "finn": 16925, + "finna": 35180, + "finnish": 19616, + "fino": 30083, + "fins": 32810, + "fintech": 48929, + "fintech": 8899, + "fion": 27476, + "fiona": 20099, + "fior": 37086, + "fiore": 44997, + "fioren": 33188, + "fiorentina": 43713, + "fios": 42521, + "fir": 770, + "fir": 16233, + "fire": 2951, + "fire": 1769, + "firearm": 40311, + "firearms": 23960, + "fireball": 40543, + "firec": 42806, + "fired": 8846, + "firefighter": 20498, + "firefighters": 12600, + "firefly": 33997, + "firefox": 35372, + "fireman": 46085, + "firen": 34752, + "firenze": 38445, + "fireplace": 23050, + "fires": 8749, + "fireside": 36185, + "firework": 40750, + "fireworks": 10641, + "firing": 15105, + "firm": 16936, + "firm": 7705, + "firmly": 29156, + "firms": 13655, + "firmware": 42691, + "first": 6853, + "first": 874, + "firstdayof": 44297, + "firsth": 48512, + "firsts": 47884, + "firth": 26078, + "fis": 7846, + "fis": 47683, + "fiscal": 20825, + "fischer": 26532, + "fish": 6431, + "fish": 2759, + "fisher": 11175, + "fisher": 9176, + "fisheries": 24612, + "fisherman": 25055, + "fishermen": 28547, + "fishers": 42065, + "fishery": 49057, + "fishes": 35470, + "fishing": 31703, + "fishing": 4935, + "fishy": 35665, + "fist": 48340, + "fist": 17085, + "fit": 2366, + "fit": 2478, + "fitbit": 33768, + "fitch": 44614, + "fitfam": 20662, + "fitnes": 47285, + "fitness": 20044, + "fitness": 4838, + "fits": 6401, + "fitt": 32994, + "fitted": 14863, + "fitter": 42096, + "fitters": 32364, + "fitting": 11769, + "fittings": 45787, + "fitz": 11120, + "fitz": 25913, + "fitzgerald": 20606, + "fitzpatrick": 37141, + "fiu": 38374, + "five": 19508, + "five": 3127, + "fives": 44066, + "fix": 4596, + "fix": 6028, + "fixed": 9393, + "fixes": 25473, + "fixing": 17423, + "fixture": 17317, + "fixtures": 19904, + "fizz": 31242, + "fj": 43183, + "fj": 46447, + "fjor": 31260, + "fk": 12410, + "fl": 1082, + "fl": 2685, + "fla": 1577, + "fla": 20292, + "flag": 11536, + "flag": 4859, + "flagged": 45012, + "flags": 12221, + "flagship": 19779, + "flagstaff": 40406, + "flair": 24938, + "flake": 21221, + "flakes": 20934, + "flam": 10559, + "flame": 40351, + "flame": 13484, + "flamen": 28826, + "flamenco": 37362, + "flames": 13441, + "flamin": 42693, + "flaming": 34782, + "flamingo": 30323, + "flan": 14572, + "flanagan": 28641, + "flanders": 34837, + "flank": 44553, + "flann": 39510, + "flannel": 37807, + "flap": 35253, + "flappy": 40241, + "flare": 21185, + "flares": 46088, + "flash": 6089, + "flash": 5815, + "flashback": 14616, + "flashback": 11988, + "flashbackfriday": 15014, + "flashbacks": 47056, + "flashes": 31259, + "flashing": 31764, + "flashlight": 37256, + "flask": 36194, + "flat": 8986, + "flat": 6313, + "flats": 17228, + "flatt": 45498, + "flattering": 43267, + "flaun": 41421, + "flav": 7191, + "flavo": 28895, + "flavor": 31835, + "flavor": 11818, + "flavored": 29350, + "flavorful": 49135, + "flavors": 16930, + "flavour": 17026, + "flavoured": 42397, + "flavours": 21083, + "flaw": 14268, + "flaw": 34978, + "flawed": 35136, + "flawless": 15531, + "flaws": 
30492, + "flax": 43443, + "fle": 2428, + "fle": 44964, + "flea": 24883, + "fleck": 28143, + "fled": 26731, + "flee": 19427, + "flee": 30167, + "fleece": 25038, + "fleeing": 30543, + "fleek": 43513, + "fleet": 35922, + "fleet": 9147, + "fleetwood": 28883, + "fleming": 25769, + "fler": 48789, + "flesh": 17495, + "flet": 16102, + "fletcher": 19810, + "fleur": 28593, + "flew": 13768, + "flex": 16426, + "flex": 12038, + "flexi": 10032, + "flexibility": 22547, + "flexible": 14502, + "flexing": 48483, + "fli": 2472, + "flick": 13746, + "flick": 23414, + "flickr": 17755, + "flies": 8070, + "flight": 24701, + "flight": 3795, + "flights": 10515, + "flin": 24730, + "flin": 43816, + "flinders": 44647, + "fling": 22768, + "flint": 28306, + "flint": 18324, + "flip": 20385, + "flip": 11035, + "flipk": 30829, + "flipkart": 33154, + "flipped": 28144, + "flipping": 25881, + "flips": 35089, + "flir": 24330, + "flirt": 38352, + "flirting": 35243, + "flix": 40663, + "flo": 1945, + "flo": 20711, + "float": 16123, + "floating": 12619, + "floats": 33272, + "flock": 36297, + "flock": 21822, + "flondon": 47366, + "floo": 4062, + "flood": 23793, + "flood": 7148, + "flooded": 19706, + "flooding": 10204, + "floods": 16369, + "floor": 23657, + "floor": 4125, + "flooring": 19227, + "floors": 15671, + "flop": 22994, + "floppy": 38267, + "flops": 29146, + "flor": 15784, + "flor": 41669, + "flora": 18906, + "floral": 10732, + "florals": 48331, + "floren": 37706, + "florence": 11617, + "flores": 21537, + "flori": 3482, + "florian": 41861, + "florida": 34264, + "florida": 3966, + "florist": 38403, + "floss": 36453, + "flotus": 35181, + "flour": 18592, + "flouri": 23239, + "flourish": 36038, + "flow": 2180, + "flow": 5608, + "flower": 12772, + "flower": 4055, + "flowering": 19953, + "flowers": 4023, + "flowing": 14922, + "flown": 25659, + "flows": 16715, + "floyd": 46369, + "floyd": 13656, + "flu": 3698, + "flu": 13528, + "fluctu": 40181, + "fluence": 38169, + "fluent": 30025, + "fluff": 31174, + "fluffy": 40346, + "fluffy": 17054, + "fluid": 43803, + "fluid": 16717, + "fluids": 41490, + "fluor": 45127, + "fluore": 26974, + "fluorescent": 35036, + "fluori": 45611, + "flur": 31591, + "flush": 25777, + "flushing": 43754, + "flute": 23746, + "flux": 25249, + "flwx": 30907, + "fly": 5666, + "fly": 3228, + "flye": 30873, + "flyeagles": 39927, + "flyeaglesfly": 39931, + "flyer": 11875, + "flyers": 14181, + "flyfishing": 31800, + "flying": 20782, + "flying": 4610, + "flyn": 40676, + "flynn": 15721, + "flyo": 33506, + "flyover": 38083, + "fm": 13715, + "fm": 3689, + "fman": 25152, + "fml": 26730, + "fmr": 32875, + "fn": 22773, + "fn": 21763, + "fnc": 46506, + "fo": 898, + "fo": 6157, + "foal": 40386, + "foam": 30039, + "foam": 14587, + "foamed": 26711, + "fob": 40315, + "focal": 30934, + "focu": 5827, + "focus": 4353, + "focused": 9319, + "focuses": 20093, + "focusing": 15551, + "fod": 31015, + "fod": 43299, + "fodils": 44411, + "foe": 22952, + "foes": 46279, + "fog": 9417, + "foggy": 19770, + "foil": 17302, + "fol": 1106, + "fol": 48616, + "fold": 35201, + "fold": 11021, + "foldable": 48307, + "folded": 25233, + "folder": 25717, + "folding": 15464, + "folds": 24266, + "foley": 22850, + "foli": 7713, + "folia": 48964, + "foliage": 26350, + "folio": 10772, + "folk": 10665, + "folk": 6032, + "folke": 47190, + "folkl": 27273, + "folklore": 22133, + "folklore": 28620, + "folklorethursday": 23270, + "folks": 5422, + "follo": 41417, + "follow": 1964, + "follow": 1979, + "followart": 40957, + "followback": 33863, + "followed": 6499, + 
"follower": 17039, + "followers": 4856, + "following": 3473, + "followme": 29668, + "followparty": 44757, + "follows": 11287, + "followthe": 30747, + "folly": 41408, + "folsom": 42108, + "fom": 34540, + "fon": 5017, + "fon": 38318, + "fond": 19964, + "fonda": 44609, + "fondue": 48321, + "fone": 40672, + "font": 37610, + "font": 16248, + "fontaine": 37864, + "fontana": 43643, + "fontein": 45062, + "fonts": 32801, + "foo": 1183, + "foo": 23435, + "food": 4586, + "food": 1559, + "foodand": 38317, + "foodbank": 31926, + "foodie": 30762, + "foodie": 9847, + "foodies": 22416, + "foodnetwork": 46793, + "foods": 7057, + "foodsecurity": 49329, + "foodtruck": 47682, + "fool": 23959, + "fool": 12212, + "fooled": 28761, + "fooling": 47964, + "foolish": 33824, + "fools": 15946, + "foot": 6702, + "foot": 4738, + "footage": 11130, + "footb": 33466, + "football": 9376, + "football": 1882, + "footballer": 20646, + "footballers": 30269, + "footed": 38040, + "footh": 25951, + "foothills": 37020, + "footpath": 48858, + "footprint": 23206, + "footprints": 39640, + "footsteps": 27289, + "footwear": 22772, + "footy": 39866, + "footy": 18922, + "for": 645, + "for": 556, + "forage": 46871, + "foraging": 39056, + "forall": 17824, + "forbe": 49098, + "forbes": 13925, + "forbi": 24754, + "forbidden": 25164, + "force": 12068, + "force": 2869, + "forced": 8201, + "forces": 5381, + "forchange": 35848, + "forcing": 21573, + "ford": 3751, + "ford": 1623, + "fordfc": 28581, + "fordham": 48792, + "fords": 29351, + "fordshire": 14645, + "fore": 1484, + "fore": 1332, + "forec": 34155, + "forecast": 7361, + "forecasting": 38133, + "forecasts": 27696, + "foreclo": 44916, + "forefront": 37679, + "foreground": 35186, + "forehead": 25394, + "foreig": 26497, + "foreign": 42255, + "foreign": 6046, + "foreigners": 38549, + "foreman": 36174, + "foremost": 42128, + "foren": 16526, + "forensic": 23158, + "forensics": 38763, + "forest": 18760, + "forest": 4167, + "forestation": 33939, + "forestry": 26281, + "forests": 14095, + "forever": 14748, + "forever": 3225, + "forevery": 40605, + "forex": 40200, + "forex": 17395, + "forfe": 44871, + "forge": 19232, + "forged": 28105, + "forget": 46153, + "forget": 2678, + "forgets": 35613, + "forgetting": 25452, + "forgi": 22080, + "forgive": 15332, + "forgiven": 44894, + "forgiveness": 23585, + "forgood": 39169, + "forgot": 6483, + "forgotten": 7994, + "fork": 24501, + "fork": 13700, + "forkids": 48571, + "forklift": 43202, + "forks": 28769, + "forlife": 17624, + "form": 1157, + "form": 1907, + "forma": 38829, + "formal": 12978, + "formally": 24867, + "format": 16252, + "format": 11874, + "formation": 2510, + "formations": 37715, + "formative": 48882, + "formats": 32085, + "forme": 42085, + "formed": 6528, + "former": 2276, + "formerly": 20866, + "formid": 38599, + "formidable": 39834, + "forming": 15443, + "formity": 42290, + "forms": 5161, + "formu": 8689, + "formul": 23923, + "formula": 24485, + "formula": 10776, + "formulae": 34586, + "formulated": 45066, + "forre": 38876, + "forrest": 25205, + "forrester": 45338, + "forsa": 48958, + "forsale": 13303, + "forster": 42923, + "forsy": 29629, + "forsyth": 40952, + "fort": 12300, + "fort": 2921, + "forte": 44350, + "forte": 27367, + "forth": 17068, + "forth": 11932, + "forthcoming": 19989, + "forthe": 12521, + "forti": 26984, + "fortified": 46486, + "fortn": 14428, + "fortnight": 39235, + "fortnite": 38734, + "fortnite": 17890, + "fortress": 19988, + "fortun": 6950, + "fortunate": 19898, + "fortunately": 34358, + "fortune": 40931, + "fortune": 
11451, + "fortunes": 41989, + "forty": 24399, + "forum": 37851, + "forum": 4538, + "forums": 31518, + "forwar": 34364, + "forward": 47031, + "forward": 2342, + "forwards": 38974, + "foryou": 35150, + "forz": 46056, + "forza": 33293, + "forza": 28089, + "fos": 36925, + "fos": 22081, + "foss": 14240, + "foss": 37911, + "fossil": 20419, + "fossil": 15202, + "fossilfriday": 26079, + "fossils": 30652, + "foster": 26778, + "foster": 8139, + "fostering": 35996, + "fosters": 37644, + "foto": 15908, + "foto": 12823, + "fotogra": 23687, + "fotografia": 40256, + "fotos": 26124, + "fou": 14516, + "fought": 10844, + "foul": 19784, + "foun": 3154, + "found": 3454, + "found": 1546, + "foundation": 4058, + "foundations": 25219, + "founded": 12240, + "founder": 5145, + "founders": 14602, + "founding": 15317, + "foundry": 31426, + "fountain": 44863, + "fountain": 13405, + "fountains": 37411, + "four": 5113, + "four": 2721, + "foursquare": 34484, + "fourteen": 46255, + "fourth": 7516, + "fourthofjuly": 47805, + "fow": 17084, + "fowl": 31685, + "fowler": 20980, + "fox": 5007, + "fox": 3240, + "foxandfriends": 45841, + "foxes": 24145, + "foxnews": 18830, + "foxsports": 39267, + "foxtv": 49396, + "foxx": 32993, + "foxy": 27945, + "foy": 30284, + "foyer": 38011, + "foyle": 47902, + "fp": 28058, + "fp": 8941, + "fpl": 27970, + "fpp": 36464, + "fps": 25300, + "fpv": 43175, + "fr": 936, + "fr": 5512, + "fra": 3368, + "fra": 15644, + "frac": 15607, + "fracking": 21894, + "fractal": 46471, + "fraction": 26788, + "fractu": 25847, + "fracture": 28995, + "fractured": 37421, + "fractures": 46213, + "frag": 13093, + "fragile": 23579, + "fragment": 39209, + "fragments": 41424, + "fragr": 15403, + "fragrance": 17874, + "fragrances": 44567, + "fragrant": 37030, + "fram": 27987, + "frame": 11029, + "frame": 6481, + "framed": 13135, + "frames": 15479, + "framework": 13195, + "frameworks": 43136, + "framing": 24539, + "frampton": 41733, + "fran": 2118, + "fran": 18878, + "franc": 3872, + "franc": 42340, + "franca": 48952, + "france": 12045, + "france": 3552, + "frances": 20803, + "francesca": 32327, + "francesco": 25816, + "franch": 11756, + "franchi": 46438, + "franchise": 13664, + "franci": 46458, + "francis": 22187, + "francis": 7660, + "francisco": 6887, + "franco": 17934, + "franco": 17052, + "francois": 29317, + "frank": 5390, + "frank": 5229, + "franken": 20487, + "franken": 48252, + "frankenstein": 26410, + "frankfur": 17442, + "frankfurt": 18598, + "franki": 39227, + "frankie": 38373, + "frankie": 16215, + "franklin": 40935, + "franklin": 9999, + "frankly": 38015, + "franks": 42855, + "frans": 47892, + "franz": 25449, + "franç": 38381, + "fraser": 39082, + "fraser": 16754, + "frat": 15225, + "frat": 39292, + "fraternity": 24433, + "frau": 23063, + "fraud": 40647, + "fraud": 9961, + "fraudul": 42655, + "fraudulent": 47408, + "fray": 41154, + "frazier": 32841, + "frc": 41507, + "fre": 821, + "fre": 43165, + "freak": 20352, + "freak": 13701, + "freaked": 43511, + "freakin": 23900, + "freaking": 11992, + "freaks": 27009, + "freaky": 31583, + "freck": 33328, + "freckles": 48036, + "fred": 9486, + "fred": 6678, + "freddie": 41890, + "freddie": 17014, + "freddy": 24394, + "freder": 10745, + "frederic": 41165, + "frederick": 37103, + "frederick": 18570, + "fredo": 48241, + "free": 2065, + "free": 1139, + "freebie": 35865, + "freebies": 28630, + "freec": 46569, + "freed": 12585, + "freed": 23392, + "freedom": 17992, + "freedom": 4511, + "freedoms": 32500, + "freef": 48678, + "freel": 14174, + "freelance": 21942, + "freely": 
24436, + "freeman": 16450, + "freep": 32499, + "freepalestine": 39242, + "freer": 44676, + "frees": 27455, + "freestyle": 15594, + "freeway": 24927, + "freeze": 14187, + "freezer": 25390, + "freezing": 12499, + "frei": 30183, + "freight": 17023, + "fremantle": 48012, + "fremont": 34578, + "fren": 2919, + "french": 13118, + "french": 3461, + "frenzy": 30084, + "frequ": 9211, + "frequencies": 45319, + "frequency": 18825, + "frequent": 19836, + "frequently": 22434, + "fresco": 31609, + "fresh": 4065, + "fresh": 2975, + "fresher": 49284, + "freshers": 35810, + "freshest": 46809, + "freshly": 16081, + "freshman": 9381, + "freshmen": 21292, + "freshness": 45872, + "freshwater": 24803, + "fresno": 40879, + "fresno": 20995, + "fret": 40510, + "freud": 40787, + "frey": 22136, + "frey": 9082, + "fri": 815, + "fri": 6882, + "friars": 30513, + "fric": 18981, + "frick": 46304, + "friction": 38563, + "frid": 46388, + "frida": 36001, + "friday": 6350, + "friday": 1461, + "fridayfeeling": 11952, + "fridaymotivation": 38544, + "fridaynight": 44858, + "fridayreads": 37736, + "fridays": 15589, + "fridaythe": 47642, + "fridge": 13491, + "fridges": 40734, + "frie": 36999, + "fried": 13743, + "fried": 7310, + "friedman": 29402, + "friedrich": 34171, + "friend": 3017, + "friend": 1625, + "friendly": 44612, + "friendly": 4681, + "friends": 38875, + "friends": 1574, + "friendship": 42674, + "friendship": 7679, + "friendships": 28840, + "fries": 11369, + "frifotos": 40493, + "friger": 20785, + "friggin": 48300, + "frigh": 34831, + "fright": 24277, + "fright": 40207, + "frightened": 47136, + "frightening": 39290, + "fringe": 10640, + "fris": 37252, + "frisbee": 45768, + "frisco": 35945, + "frit": 34614, + "fritz": 29860, + "friyay": 38887, + "frm": 12951, + "fro": 626, + "fro": 26603, + "frock": 45306, + "frog": 26494, + "frog": 11438, + "frogs": 20781, + "from": 8330, + "from": 633, + "frome": 48691, + "fromhome": 41477, + "fromthe": 18756, + "fron": 1847, + "fron": 18036, + "front": 10996, + "front": 2184, + "frontal": 35794, + "frontier": 18253, + "frontiers": 38396, + "frontline": 29589, + "frontman": 36775, + "fronts": 26846, + "froome": 48560, + "frosh": 47069, + "frost": 39420, + "frost": 11619, + "frosted": 35988, + "frosting": 33872, + "frosty": 22760, + "froze": 47788, + "frozen": 42464, + "frozen": 8507, + "frs": 26216, + "fru": 3248, + "fruit": 16771, + "fruit": 5190, + "fruitful": 31494, + "fruits": 13282, + "fruity": 22320, + "frustr": 16046, + "frustrated": 25111, + "frustrating": 31342, + "frustration": 30535, + "fry": 33914, + "fry": 13686, + "fryer": 49217, + "frying": 38516, + "fs": 23699, + "fs": 3854, + "fsa": 33373, + "fsu": 44185, + "fsu": 19317, + "ft": 3391, + "ft": 981, + "fta": 41975, + "ftc": 33752, + "fted": 5612, + "fter": 25063, + "fthe": 22886, + "ftheday": 9823, + "fting": 6174, + "fton": 26605, + "ftp": 42649, + "fts": 3767, + "ftse": 46717, + "ftw": 19298, + "fty": 17494, + "fu": 665, + "fu": 9098, + "fuch": 42617, + "fudge": 24270, + "fue": 43723, + "fuego": 41500, + "fuel": 21113, + "fuel": 5945, + "fueled": 28792, + "fueling": 38793, + "fuelled": 48357, + "fuels": 19365, + "fuentes": 44393, + "fuer": 29645, + "fug": 29227, + "fugitive": 39257, + "fuji": 15573, + "fuji": 21634, + "fujifilm": 24765, + "fuk": 31051, + "fuku": 20728, + "fukushima": 33929, + "ful": 1814, + "ful": 857, + "fulbright": 41834, + "fulfill": 43675, + "fulfill": 27467, + "fulfilled": 29919, + "fulfilling": 30621, + "fulfillment": 45573, + "fulham": 25574, + "full": 9407, + "full": 1476, + "fuller": 20225, + 
"fullerton": 42822, + "fullest": 35603, + "fully": 39142, + "fully": 2401, + "fulness": 10526, + "fuls": 41606, + "fulton": 26725, + "fum": 38393, + "fumble": 49373, + "fun": 1229, + "fun": 1499, + "func": 8679, + "function": 8093, + "functional": 12885, + "functionality": 33316, + "functioning": 25479, + "functions": 18001, + "fund": 19089, + "fund": 4877, + "fundam": 11670, + "fundament": 18852, + "fundamental": 17627, + "fundamentally": 45378, + "fundamentals": 27887, + "funday": 15439, + "funded": 10588, + "funding": 5588, + "fundra": 6201, + "fundraiser": 10049, + "fundraising": 10755, + "funds": 7066, + "funer": 40693, + "funeral": 10606, + "funfact": 31596, + "funfactfriday": 40710, + "fungal": 38838, + "fungi": 27837, + "fungus": 30677, + "funk": 37353, + "funk": 13372, + "funko": 49402, + "funko": 23697, + "funky": 16492, + "funnel": 27862, + "funnier": 42232, + "funniest": 15557, + "funny": 19124, + "funny": 3789, + "funrun": 34185, + "fur": 2395, + "fur": 9686, + "furi": 40816, + "furious": 17522, + "furman": 49238, + "furn": 21348, + "furnace": 31913, + "furnished": 37388, + "furnitu": 45696, + "furniture": 7993, + "furry": 33414, + "furry": 15351, + "fursuit": 25306, + "fursuit": 43083, + "fursuitfriday": 27917, + "further": 5583, + "fury": 14404, + "fus": 18419, + "fuse": 23386, + "fused": 38994, + "fusion": 44661, + "fusion": 9364, + "fuss": 26331, + "fut": 21460, + "fut": 34049, + "futbol": 33014, + "futsal": 20558, + "futu": 33454, + "futur": 38840, + "future": 7959, + "future": 1904, + "futureof": 22599, + "futureofwork": 33202, + "futures": 13488, + "futuri": 19068, + "futurism": 48435, + "futurist": 48086, + "futuristic": 30987, + "fuzz": 47128, + "fuzz": 40443, + "fuzzy": 25876, + "fv": 29795, + "fw": 23934, + "fw": 5277, + "fwd": 27052, + "fx": 17807, + "fx": 9025, + "fy": 8440, + "fy": 2702, + "fyi": 16014, + "fying": 5294, + "fz": 46400, + "fé": 34072, + "g": 70, + "g": 326, + "ga": 1275, + "ga": 1531, + "gaa": 10715, + "gaal": 40867, + "gaard": 24645, + "gab": 3927, + "gab": 37382, + "gabbana": 36272, + "gabby": 48115, + "gabby": 24567, + "gabe": 18916, + "gabi": 41931, + "gable": 33387, + "gables": 40928, + "gabri": 8311, + "gabriel": 31684, + "gabriel": 13244, + "gabrielle": 33572, + "gaby": 46420, + "gac": 32520, + "gad": 7786, + "gad": 44651, + "gadget": 25525, + "gadgets": 22840, + "gado": 29489, + "gae": 22003, + "gael": 35663, + "gaelic": 31173, + "gaf": 21354, + "gaf": 32670, + "gag": 14121, + "gag": 18844, + "gaga": 9782, + "gage": 21081, + "gah": 27750, + "gai": 24214, + "gai": 25153, + "gaia": 41269, + "gail": 41160, + "gail": 27676, + "gain": 21536, + "gain": 6202, + "gaine": 35747, + "gained": 14489, + "gaines": 49225, + "gainesville": 40427, + "gaining": 15260, + "gains": 42751, + "gains": 12107, + "gal": 2001, + "gal": 4488, + "gala": 7211, + "galac": 18864, + "galactic": 25514, + "galap": 41115, + "galapagos": 44057, + "galat": 39853, + "galatasar": 42413, + "galatasaray": 47787, + "galax": 5647, + "galaxies": 32435, + "galaxy": 32130, + "galaxy": 6545, + "gale": 37658, + "gale": 21380, + "galerie": 44539, + "gales": 48633, + "gali": 17546, + "gali": 30552, + "galicia": 47927, + "galileo": 39671, + "gall": 3011, + "gall": 33374, + "galla": 16847, + "gallagher": 19168, + "galleria": 40656, + "galleries": 22304, + "gallery": 36648, + "gallery": 3830, + "galley": 48917, + "galli": 22568, + "gallipoli": 47249, + "gallo": 37350, + "gallo": 33265, + "gallon": 24615, + "gallons": 29335, + "galloway": 27796, + "galore": 22286, + "gals": 20125, + "galvani": 
46046, + "galve": 34328, + "galveston": 36003, + "galway": 38045, + "galway": 17112, + "gam": 1162, + "gam": 34195, + "gama": 35873, + "gambia": 32988, + "gamble": 26121, + "gambling": 20287, + "game": 2882, + "game": 1063, + "gameart": 31490, + "gameboy": 40951, + "gamecube": 44079, + "gameday": 9241, + "gamedev": 7544, + "gameinsight": 42626, + "gameof": 10987, + "gameofthrones": 11822, + "gameon": 47691, + "gameplay": 16794, + "gamer": 12595, + "gamer": 11598, + "gamergate": 25961, + "gamers": 16166, + "gamersunite": 26423, + "games": 18551, + "games": 1955, + "gamescom": 37003, + "gamestop": 39436, + "gametime": 45899, + "gami": 42025, + "gamification": 48908, + "gaming": 28803, + "gaming": 4017, + "gamma": 22180, + "gamo": 39325, + "gan": 1822, + "gan": 1670, + "gand": 8399, + "ganda": 27261, + "gander": 44508, + "gandhi": 12322, + "ganesh": 30362, + "ganesha": 45185, + "gang": 8066, + "gang": 5674, + "ganga": 36275, + "gangnam": 46777, + "gangs": 29844, + "gangsta": 37365, + "gangster": 26514, + "gani": 48324, + "gann": 45665, + "gannon": 45837, + "gano": 25304, + "gao": 26556, + "gaon": 19279, + "gap": 29906, + "gap": 7609, + "gaps": 25296, + "gar": 1099, + "gar": 5824, + "gara": 28710, + "garage": 8474, + "garbage": 13760, + "garci": 44658, + "garcia": 10529, + "gard": 7751, + "gard": 21003, + "garda": 31906, + "garde": 22649, + "garden": 4674, + "garden": 2756, + "gardenchat": 46292, + "gardener": 28554, + "gardeners": 38205, + "gardening": 10483, + "gardens": 6152, + "gardiner": 43121, + "gardner": 18710, + "gare": 5633, + "gare": 48402, + "gareth": 37140, + "gareth": 18175, + "garfield": 26728, + "garh": 16762, + "gari": 40898, + "gari": 43080, + "garis": 37839, + "garland": 23418, + "garlic": 9685, + "garment": 31418, + "garments": 43341, + "garmin": 39885, + "garner": 20340, + "garnet": 37669, + "garo": 30388, + "garrett": 15881, + "garri": 21764, + "garrison": 30108, + "garros": 40425, + "garry": 24398, + "gars": 12055, + "gart": 18380, + "gart": 18751, + "garten": 14684, + "garter": 48420, + "garth": 45398, + "garth": 24469, + "gartner": 43334, + "gartner": 29678, + "garty": 46383, + "garu": 31140, + "garvey": 39511, + "garwal": 38623, + "gary": 10535, + "gary": 4516, + "garza": 49393, + "gas": 5047, + "gas": 2474, + "gases": 36971, + "gasoline": 27691, + "gasp": 43762, + "gaston": 40669, + "gastri": 49197, + "gastro": 23740, + "gastron": 30699, + "gastronomy": 46987, + "gat": 5314, + "gat": 18941, + "gata": 44575, + "gate": 8071, + "gate": 3302, + "gated": 23997, + "gates": 9472, + "gateshead": 40051, + "gateway": 45221, + "gateway": 14943, + "gather": 36345, + "gather": 12602, + "gathered": 14646, + "gathering": 9197, + "gatherings": 48096, + "gathers": 39250, + "gating": 27561, + "gation": 11095, + "gations": 33906, + "gato": 44492, + "gator": 20216, + "gator": 16390, + "gatorade": 36354, + "gators": 17173, + "gatory": 24796, + "gatsby": 32586, + "gatwick": 37122, + "gau": 5919, + "gau": 43068, + "gauge": 18728, + "gaunt": 31862, + "gauntlet": 37163, + "gautam": 45853, + "gautam": 31356, + "gauteng": 40333, + "gav": 8966, + "gave": 3485, + "gavin": 32974, + "gavin": 16389, + "gaw": 15405, + "gawd": 43239, + "gawx": 43420, + "gay": 7460, + "gay": 5627, + "gaya": 39477, + "gaye": 41401, + "gayle": 29998, + "gayo": 36768, + "gays": 28001, + "gaz": 4837, + "gaz": 36475, + "gaza": 38391, + "gaza": 10112, + "gazaunderattack": 42458, + "gaze": 23212, + "gazette": 20443, + "gazing": 28373, + "gb": 8727, + "gb": 4619, + "gba": 18528, + "gbbo": 34474, + "gbc": 42993, + "gbp": 
27391, + "gbr": 31984, + "gby": 40509, + "gc": 8577, + "gc": 6043, + "gcc": 26804, + "gcse": 28763, + "gcu": 34137, + "gd": 13264, + "gd": 14604, + "gdc": 32793, + "gden": 44928, + "gdp": 17100, + "gdpr": 22963, + "ge": 619, + "ge": 710, + "gea": 26790, + "gear": 15532, + "gear": 4802, + "gearbox": 42454, + "geared": 33903, + "gearing": 19027, + "gears": 21147, + "geaux": 36313, + "gecko": 38616, + "ged": 17252, + "ged": 3480, + "geddon": 31720, + "gedly": 13991, + "gee": 9806, + "gee": 9071, + "geek": 17920, + "geek": 7135, + "geeks": 20110, + "geeky": 47332, + "geel": 25906, + "geelong": 34555, + "gees": 38088, + "geese": 26413, + "geez": 42394, + "geh": 30320, + "geist": 38290, + "gel": 7343, + "gel": 5697, + "gelato": 29577, + "gels": 42552, + "gely": 14637, + "gem": 14261, + "gem": 7613, + "gement": 19495, + "gemini": 23086, + "gemma": 23952, + "gems": 14355, + "gemstone": 27747, + "gemstones": 43972, + "gen": 1024, + "gen": 3278, + "gence": 16088, + "gency": 5245, + "gend": 33247, + "gender": 22976, + "gender": 5906, + "gendere": 35824, + "genderequality": 43338, + "gene": 5822, + "gene": 7962, + "genealo": 24142, + "genealogy": 29381, + "gener": 1832, + "general": 20576, + "general": 3658, + "generally": 19256, + "generals": 30296, + "generate": 16896, + "generated": 19450, + "generates": 33938, + "generating": 23882, + "generation": 41211, + "generation": 4883, + "generational": 34506, + "generations": 12247, + "generative": 29472, + "generator": 19399, + "generators": 41917, + "generic": 26978, + "generosity": 23015, + "generous": 12570, + "generously": 35113, + "genes": 19683, + "genesis": 13518, + "genetic": 47746, + "genetic": 13578, + "genetically": 36745, + "genetics": 18276, + "geneva": 14799, + "genevie": 41633, + "genevieve": 46584, + "geni": 22334, + "genic": 15750, + "genie": 24221, + "genital": 32960, + "genius": 8235, + "geniuses": 41406, + "geno": 41544, + "geno": 46776, + "genoa": 43993, + "genoci": 14687, + "genocide": 15903, + "genome": 23991, + "genomic": 44371, + "genomics": 26227, + "genre": 14249, + "genres": 30340, + "gens": 17449, + "gent": 3685, + "gent": 7139, + "gente": 34325, + "gentle": 7262, + "gentle": 13577, + "gentleman": 13293, + "gentlemen": 11692, + "gently": 17187, + "gento": 28320, + "gentri": 41148, + "gentry": 47225, + "gents": 18862, + "genu": 9182, + "genuine": 12184, + "genuinely": 20006, + "genus": 38161, + "geny": 35323, + "geo": 5038, + "geo": 11604, + "geocaching": 47908, + "geof": 20629, + "geoff": 33697, + "geoff": 20386, + "geoffrey": 29520, + "geograph": 45920, + "geographic": 22635, + "geographical": 39380, + "geography": 17101, + "geological": 38380, + "geology": 21578, + "geom": 46135, + "geome": 12958, + "geometric": 22419, + "geometry": 21731, + "geon": 20844, + "geon": 7295, + "geons": 15914, + "geopol": 39758, + "geor": 2549, + "georg": 43126, + "george": 8377, + "george": 3296, + "georges": 25042, + "georgetown": 22970, + "georgie": 42115, + "georgina": 43892, + "geospatial": 46238, + "geothermal": 38413, + "geous": 3068, + "ger": 1291, + "ger": 1502, + "gera": 48867, + "gerald": 29901, + "gerald": 13269, + "gerard": 35979, + "gerard": 20826, + "gerber": 45058, + "gered": 40179, + "geri": 41664, + "geri": 46214, + "gering": 24077, + "germain": 38786, + "german": 14972, + "german": 4710, + "germans": 28400, + "germany": 4464, + "germin": 44721, + "germs": 47731, + "geronimo": 45171, + "gerrard": 26538, + "gerry": 29825, + "gerry": 23026, + "gers": 3314, + "gertrude": 46950, + "gervais": 36527, + "gery": 32845, + "ges": 3316, 
+ "gest": 11843, + "gest": 2033, + "gesture": 21780, + "gestures": 43524, + "get": 5670, + "get": 779, + "geta": 13155, + "getaway": 16131, + "gether": 27224, + "getic": 20661, + "getin": 25822, + "getit": 44891, + "getit": 48315, + "getoutside": 35644, + "gets": 39448, + "gets": 2127, + "gett": 6647, + "gett": 27965, + "gettable": 15620, + "gette": 29800, + "gettin": 13428, + "getting": 30885, + "getting": 1500, + "getty": 31185, + "getty": 13965, + "gettys": 35189, + "gettysburg": 37062, + "getyour": 42159, + "gey": 29289, + "gf": 28953, + "gf": 10846, + "gfriend": 35245, + "gfs": 37553, + "gg": 1129, + "gg": 3286, + "gga": 26003, + "ggan": 25626, + "gge": 21521, + "gge": 31659, + "gged": 6095, + "gger": 12367, + "gger": 3493, + "ggers": 7480, + "ggg": 20143, + "gggg": 33513, + "ggi": 21662, + "ggin": 17160, + "gging": 4966, + "ggins": 12444, + "ggle": 34981, + "ggle": 11430, + "ggled": 46328, + "ggles": 14703, + "ggling": 16523, + "ggly": 39407, + "ggs": 4797, + "ggy": 24935, + "ggy": 6476, + "gh": 583, + "gh": 790, + "gha": 10010, + "gha": 25183, + "gham": 21456, + "ghan": 18945, + "ghan": 6624, + "ghana": 30330, + "ghana": 9731, + "ghanaian": 34223, + "ghani": 36699, + "ghar": 37334, + "ghar": 36973, + "ghat": 43989, + "ghaz": 37493, + "ghc": 42139, + "ghe": 10754, + "ghe": 28561, + "ghead": 40783, + "ghee": 34794, + "gher": 21542, + "gher": 14796, + "ghet": 18447, + "ghetti": 17485, + "ghetto": 22403, + "ghi": 22436, + "ghi": 22279, + "ghibli": 40555, + "ghj": 38439, + "ghlin": 24131, + "gho": 4307, + "ghorn": 38094, + "ghosh": 43279, + "ghoshal": 49134, + "ghost": 11417, + "ghost": 7108, + "ghostbusters": 25462, + "ghostly": 44901, + "ghosts": 16737, + "ghou": 35843, + "ghoul": 45302, + "ghouse": 38238, + "ghs": 14157, + "ght": 1413, + "ght": 630, + "ghted": 4963, + "ghter": 2427, + "ghters": 12994, + "ghtful": 8334, + "ghting": 3019, + "ghtly": 6993, + "ghtning": 39740, + "ghton": 16353, + "ghts": 1259, + "ghty": 20968, + "ghty": 5866, + "ghu": 25808, + "ghue": 45675, + "ghyun": 25010, + "ghz": 24325, + "gi": 707, + "gi": 4478, + "gia": 8864, + "giac": 35444, + "giam": 39623, + "gian": 17274, + "gian": 12866, + "gianni": 46752, + "giant": 23668, + "giant": 4687, + "giants": 7076, + "giar": 34241, + "gib": 9816, + "gibb": 18964, + "gibbons": 31974, + "gibbs": 26488, + "gibility": 33297, + "gible": 13159, + "gibr": 20206, + "gibraltar": 23988, + "gibson": 37420, + "gibson": 12178, + "gic": 27900, + "gic": 2570, + "gical": 32973, + "gically": 26320, + "gid": 36774, + "gid": 21413, + "giddy": 40894, + "gideon": 43867, + "gidi": 30603, + "gie": 11459, + "gie": 3991, + "gier": 28974, + "gies": 5505, + "gif": 11363, + "gif": 11677, + "gifford": 47850, + "gifs": 37643, + "gift": 20569, + "gift": 2733, + "gifted": 15110, + "giftide": 20152, + "giftideas": 23487, + "gifting": 39546, + "gifts": 5836, + "gig": 26981, + "gig": 7471, + "gigab": 34530, + "gigan": 24104, + "gigantic": 31507, + "giggle": 36426, + "giggles": 42731, + "giggs": 44692, + "gigi": 44106, + "gigi": 26171, + "gigs": 20316, + "gil": 3997, + "gil": 10088, + "gila": 46952, + "gilbert": 14154, + "gilded": 44341, + "giles": 24802, + "gill": 14280, + "gill": 12003, + "gille": 29610, + "gilles": 39590, + "gillespie": 36242, + "gillette": 38603, + "gilli": 13695, + "gillian": 28753, + "gills": 48851, + "gilmore": 27603, + "gilt": 44378, + "gim": 31284, + "gimm": 40692, + "gimme": 21525, + "gin": 3374, + "gin": 4941, + "gina": 15604, + "gine": 27482, + "ging": 10829, + "ging": 3905, + "ginger": 16287, + "ginger": 9718, + 
"gingerbread": 23692, + "gini": 35768, + "gino": 36521, + "gins": 18328, + "gio": 16329, + "gio": 8050, + "gion": 41226, + "gior": 14920, + "giorgio": 33271, + "giorno": 33310, + "gios": 41927, + "gious": 14419, + "giov": 21404, + "giovanni": 26574, + "gipp": 41351, + "gir": 1077, + "gir": 25481, + "gira": 16949, + "giraffe": 22826, + "giri": 31709, + "girl": 3914, + "girl": 1611, + "girlfriend": 8217, + "girlfriends": 30736, + "girlpower": 37433, + "girls": 15480, + "girls": 1917, + "girly": 29605, + "giro": 39664, + "giro": 26454, + "girona": 47842, + "giroud": 41177, + "gis": 16266, + "gis": 12773, + "gist": 21241, + "git": 16060, + "git": 20918, + "gita": 40838, + "github": 31196, + "giu": 17931, + "giuli": 29762, + "giuliani": 47739, + "giuse": 29385, + "giuseppe": 33563, + "give": 4120, + "give": 1781, + "giveaway": 5310, + "giveaways": 18974, + "giveback": 41385, + "given": 33323, + "given": 4302, + "givenchy": 38245, + "giver": 43339, + "gives": 3926, + "giveup": 35485, + "giving": 14673, + "giving": 2339, + "givingback": 49300, + "givingtuesday": 23556, + "giz": 29237, + "gk": 38953, + "gk": 18719, + "gl": 1849, + "gl": 14751, + "gla": 1523, + "gla": 36904, + "glaci": 14924, + "glacial": 40782, + "glacier": 19282, + "glaciers": 42528, + "glad": 20841, + "glad": 4761, + "glades": 37432, + "gladi": 21742, + "gladiator": 38477, + "gladiators": 41087, + "gladly": 41598, + "gladys": 43168, + "glam": 8738, + "glam": 16905, + "glamorous": 22896, + "glamour": 42876, + "glamour": 17499, + "glamping": 46167, + "glan": 40482, + "glan": 45844, + "glance": 26557, + "gland": 41441, + "glar": 48535, + "glar": 41702, + "glare": 46035, + "glas": 29935, + "glas": 43654, + "glasgo": 6757, + "glasgow": 29990, + "glasgow": 7363, + "glass": 16305, + "glass": 3313, + "glasses": 6116, + "glaston": 26848, + "glastonbury": 28233, + "glau": 39171, + "glaze": 28112, + "glazed": 24122, + "gle": 7166, + "gle": 2865, + "glee": 32379, + "glee": 21614, + "glen": 6158, + "glen": 11049, + "glend": 38332, + "glendale": 33043, + "glenn": 32004, + "glenn": 12861, + "gler": 34649, + "gley": 21998, + "gli": 5896, + "gli": 28791, + "glia": 22217, + "glide": 37321, + "glider": 41636, + "glimp": 12888, + "glimpse": 13817, + "glio": 29785, + "glit": 21079, + "glitch": 29563, + "glitter": 16528, + "glitz": 44542, + "glo": 1721, + "glo": 30474, + "glob": 13363, + "global": 6707, + "global": 2779, + "globalgoals": 33211, + "globalhealth": 46751, + "globalization": 47680, + "globally": 17775, + "globalwarming": 46017, + "globe": 19436, + "globe": 9368, + "globes": 38085, + "glock": 38818, + "glomer": 43689, + "gloom": 48594, + "gloomy": 32199, + "glori": 7270, + "gloria": 19244, + "glorious": 9171, + "glory": 36107, + "glory": 7285, + "glos": 40633, + "gloss": 38258, + "gloss": 22014, + "glossy": 29802, + "glou": 15989, + "gloucester": 28133, + "gloucester": 23835, + "gloucestershire": 33789, + "glove": 16078, + "glover": 21594, + "gloves": 12363, + "glow": 30472, + "glow": 10111, + "glowing": 18437, + "glows": 48107, + "glu": 5952, + "glu": 32281, + "glucose": 34642, + "glue": 22103, + "glued": 38135, + "gluten": 15482, + "gluten": 15524, + "glutenfree": 16138, + "gly": 13027, + "glycer": 48914, + "gm": 18743, + "gm": 5918, + "gma": 18155, + "gmail": 11119, + "gman": 41043, + "gman": 36936, + "gmb": 35934, + "gmb": 31799, + "gmbh": 46877, + "gmc": 27257, + "gmo": 23486, + "gms": 36987, + "gmt": 13803, + "gn": 2455, + "gn": 9831, + "gna": 23009, + "gnation": 45912, + "gne": 25407, + "gni": 5104, + "gnment": 25110, + "gno": 
23376, + "gno": 43686, + "gnocchi": 48299, + "gnome": 33643, + "gnon": 20561, + "go": 650, + "go": 861, + "goa": 14399, + "goal": 9003, + "goal": 3321, + "goalie": 20723, + "goalkeeper": 16601, + "goals": 3295, + "goalscorer": 43547, + "goaltender": 44151, + "goat": 34082, + "goat": 9530, + "goats": 18393, + "gob": 29559, + "gobeavs": 48285, + "goblin": 26223, + "goblue": 25232, + "gobucks": 29175, + "gocougs": 34202, + "god": 4190, + "god": 1731, + "godawgs": 40436, + "godbless": 46616, + "godbless": 44007, + "godd": 16589, + "goddamn": 28495, + "goddard": 37827, + "goddess": 10808, + "godfather": 26222, + "godfrey": 40148, + "godis": 38521, + "godly": 42438, + "gods": 33620, + "gods": 10328, + "goducks": 35889, + "godzilla": 23369, + "goe": 22084, + "goers": 27784, + "goes": 43581, + "goes": 2635, + "gof": 17537, + "goff": 34399, + "goftheday": 39360, + "gofund": 34445, + "gofundme": 34686, + "gog": 42949, + "goggles": 31027, + "gogh": 19697, + "gogo": 22688, + "gogreen": 36279, + "gohawks": 34884, + "goi": 24917, + "goin": 13939, + "going": 25787, + "going": 1245, + "goku": 29550, + "gol": 1537, + "gol": 18257, + "gola": 41090, + "gold": 4999, + "gold": 2209, + "goldberg": 25161, + "goldcoast": 34634, + "golden": 10763, + "golden": 3878, + "goldeng": 20650, + "goldenglobes": 26842, + "goldfish": 40293, + "goldie": 42805, + "goldman": 27164, + "golds": 30526, + "golds": 40283, + "goldsmith": 40214, + "gole": 41297, + "golf": 9096, + "golf": 3096, + "golfclub": 45742, + "golfer": 24579, + "golfers": 28441, + "golfing": 31379, + "goli": 29265, + "goliath": 41602, + "gom": 7051, + "goma": 46198, + "gomes": 39128, + "gomez": 16433, + "gon": 1854, + "gon": 3379, + "gona": 34835, + "gone": 35135, + "gone": 3601, + "gong": 28486, + "gonna": 2562, + "gonz": 10587, + "gonzaga": 36241, + "gonzale": 17512, + "gonzales": 31265, + "gonzalez": 18198, + "goo": 1381, + "goo": 17882, + "good": 2185, + "good": 886, + "goodbye": 6968, + "goodday": 46284, + "goode": 42076, + "goodfood": 46844, + "goodfriday": 40360, + "goodie": 29213, + "goodies": 13308, + "goodluck": 19718, + "goodman": 24146, + "goodmorning": 14421, + "goodness": 10531, + "goodnight": 8540, + "goodreads": 31629, + "goods": 9340, + "goodtimes": 22570, + "goodvibes": 43146, + "goodwill": 24902, + "goodwin": 28080, + "goodwood": 30008, + "goody": 35937, + "goodyear": 42858, + "goofy": 26879, + "goog": 18581, + "google": 12195, + "google": 3460, + "googled": 40345, + "googleplay": 37309, + "goon": 15267, + "goons": 30440, + "goooo": 35876, + "goooo": 48957, + "goose": 21445, + "goose": 13822, + "goosebumps": 32254, + "gop": 18942, + "gop": 6250, + "gopack": 46995, + "gopackgo": 47719, + "gopal": 47268, + "gopdebate": 39806, + "gopher": 47750, + "gopher": 48905, + "gophers": 31957, + "gopro": 17511, + "gor": 1747, + "gor": 29827, + "gordo": 47707, + "gordon": 20485, + "gordon": 8244, + "gore": 30311, + "gore": 17872, + "gorg": 46815, + "gorge": 35548, + "gorge": 20038, + "gorgeous": 3241, + "gori": 12461, + "goria": 43359, + "gorilla": 37910, + "gorilla": 21994, + "gorman": 35741, + "goro": 44977, + "gory": 7160, + "gos": 20517, + "gos": 5693, + "gosh": 15395, + "gosling": 35320, + "gosp": 9617, + "gospel": 11313, + "goss": 39734, + "goss": 36924, + "gossi": 15684, + "gossip": 18963, + "got": 10125, + "got": 1005, + "gota": 36693, + "gotcha": 43275, + "gote": 49345, + "goth": 48465, + "goth": 20437, + "gotham": 46123, + "gotham": 18299, + "gothic": 15426, + "goti": 9497, + "goto": 39715, + "gots": 35215, + "gott": 5089, + "gott": 36466, + 
"gotta": 4633, + "gotten": 5889, + "gotti": 41881, + "gotv": 36089, + "gou": 10520, + "gou": 36555, + "gouache": 43314, + "goul": 33187, + "gould": 31087, + "gour": 13580, + "gourmet": 19111, + "gov": 4022, + "gov": 4564, + "gove": 36997, + "govegan": 38886, + "gover": 10471, + "gover": 16759, + "govern": 2351, + "govern": 32404, + "governance": 13386, + "governing": 30946, + "government": 3149, + "governmental": 42609, + "governments": 19582, + "governor": 17459, + "governor": 6630, + "governors": 26881, + "govin": 42451, + "govt": 5345, + "govuk": 28830, + "gow": 21885, + "gow": 33788, + "gowan": 31307, + "gower": 43448, + "gown": 13719, + "gowns": 38029, + "goyal": 35105, + "gp": 19329, + "gp": 5051, + "gpa": 24098, + "gps": 13639, + "gpu": 38561, + "gq": 40286, + "gq": 31324, + "gr": 709, + "gr": 6062, + "gra": 782, + "gra": 15276, + "grab": 4646, + "grabbed": 22856, + "grabbing": 26440, + "grabs": 17076, + "grac": 11323, + "grace": 13225, + "grace": 5142, + "graced": 31894, + "graceful": 25242, + "graces": 38629, + "graci": 11174, + "gracias": 16463, + "gracie": 23235, + "gracing": 37263, + "gracious": 29044, + "grad": 19869, + "grad": 7291, + "gradable": 41529, + "grade": 45435, + "grade": 3394, + "graded": 13823, + "grader": 23930, + "graders": 10930, + "grades": 10838, + "gradient": 36885, + "grading": 19016, + "grads": 17811, + "gradu": 3230, + "gradual": 45210, + "gradually": 32192, + "graduate": 6675, + "graduated": 15128, + "graduates": 12236, + "graduating": 14819, + "graduation": 8060, + "grady": 33980, + "graeme": 30192, + "graf": 46478, + "graf": 39765, + "graff": 10656, + "graffiti": 11676, + "graft": 32698, + "grafton": 47347, + "graham": 19805, + "graham": 7711, + "grail": 37184, + "grain": 44003, + "grain": 12109, + "grains": 25791, + "gral": 25631, + "gram": 2949, + "gram": 2338, + "grammar": 16077, + "grammy": 15388, + "grammys": 18121, + "grams": 6294, + "gran": 3892, + "gran": 14493, + "granada": 31172, + "grand": 3058, + "grand": 2991, + "grandad": 29148, + "grandchildren": 36856, + "granddaughter": 29460, + "grande": 37514, + "grande": 10757, + "grandes": 36382, + "grandfather": 15346, + "grandma": 10525, + "grandmother": 17469, + "grandpa": 14582, + "grandparents": 21311, + "grandprix": 39358, + "grandson": 20766, + "grandstand": 43172, + "grange": 45027, + "grange": 23850, + "granger": 42968, + "granite": 18813, + "grann": 45585, + "granny": 22710, + "granola": 34271, + "grant": 18682, + "grant": 5442, + "granted": 14156, + "granth": 41283, + "grants": 15123, + "grape": 19131, + "grape": 15959, + "grapefruit": 28347, + "grapes": 18580, + "grapevine": 47619, + "graph": 1349, + "graph": 4407, + "graphene": 38387, + "grapher": 14987, + "graphers": 32088, + "graphic": 15653, + "graphic": 4245, + "graphical": 20878, + "graphicdesign": 21907, + "graphics": 9492, + "graphies": 40164, + "graphite": 29447, + "graphs": 24670, + "graphy": 4897, + "grapp": 30843, + "gras": 31517, + "gras": 17584, + "grasp": 34975, + "grass": 11584, + "grass": 5922, + "grasses": 46807, + "grasshopper": 48894, + "grassi": 42294, + "grasso": 34808, + "grassroots": 21991, + "grassy": 44140, + "grat": 9221, + "grate": 32463, + "grateful": 45659, + "grateful": 5730, + "grati": 36402, + "gratis": 33638, + "gratitude": 12614, + "grav": 20663, + "grave": 16606, + "grave": 9981, + "gravel": 27054, + "graves": 17665, + "graveyard": 31176, + "gravit": 26150, + "gravitational": 45268, + "gravity": 47426, + "gravity": 15160, + "gravy": 21225, + "gray": 12703, + "gray": 7048, + "grays": 46848, + 
"grayson": 45831, + "grayson": 25471, + "grazi": 42427, + "grazie": 38698, + "grazing": 29889, + "grc": 44069, + "gre": 689, + "gre": 17878, + "grease": 24132, + "greasy": 44376, + "great": 3265, + "great": 830, + "greate": 31930, + "greater": 32725, + "greater": 7033, + "greatest": 39080, + "greatest": 4153, + "greatly": 13978, + "greatness": 14189, + "greats": 21855, + "greaves": 42350, + "greco": 39103, + "gree": 9987, + "gree": 30774, + "greece": 6965, + "greed": 26147, + "greedy": 33301, + "greek": 23844, + "greek": 6842, + "greeks": 35866, + "green": 2762, + "green": 1901, + "greenberg": 46662, + "greene": 16383, + "greener": 31169, + "greenery": 42493, + "greenfield": 39924, + "greeng": 42077, + "greenhouse": 20819, + "greening": 48673, + "greenland": 27345, + "greenpeace": 44755, + "greens": 10235, + "greensboro": 33436, + "greenville": 25156, + "greenway": 35205, + "greenwich": 18658, + "greenwood": 25782, + "greer": 34345, + "greet": 11042, + "greet": 11997, + "greeted": 24546, + "greeting": 17754, + "greetings": 11569, + "greets": 25464, + "greg": 6894, + "greg": 7943, + "gregation": 20131, + "gregg": 39422, + "gregg": 22929, + "gregor": 33856, + "gregor": 16177, + "gregory": 16253, + "gren": 13941, + "gren": 20119, + "grenade": 33679, + "grenfell": 42107, + "gres": 39670, + "gress": 2752, + "gret": 30041, + "greta": 33443, + "gretchen": 45516, + "grette": 38774, + "grew": 10451, + "grey": 9190, + "grey": 5046, + "greyhound": 27363, + "greyhounds": 45718, + "greys": 44311, + "greysanatomy": 36833, + "gri": 2169, + "gri": 18484, + "grid": 29067, + "grid": 9882, + "gridi": 41063, + "gridiron": 47786, + "grids": 46500, + "grief": 21058, + "grier": 22016, + "griev": 36400, + "grieving": 42383, + "griez": 47962, + "griezmann": 48396, + "griff": 17855, + "griff": 35551, + "griffi": 28676, + "griffin": 46612, + "griffin": 13161, + "griffith": 24375, + "griffiths": 34182, + "gril": 49091, + "grill": 44083, + "grill": 9519, + "grille": 34748, + "grilled": 10691, + "grilling": 28324, + "grills": 39464, + "grim": 20383, + "grim": 23635, + "grime": 37101, + "grimes": 25057, + "grimm": 27865, + "grims": 34861, + "grimsby": 41513, + "grin": 11033, + "grin": 28697, + "grinch": 40527, + "grind": 25730, + "grind": 11810, + "grinder": 31733, + "grinding": 21541, + "gring": 40135, + "grip": 15521, + "gripping": 34567, + "grips": 27819, + "gris": 29150, + "grit": 22037, + "grit": 22087, + "grits": 44307, + "gritty": 33704, + "grizz": 14877, + "grizz": 44088, + "grizzlies": 25594, + "grizzly": 29676, + "grl": 48005, + "gro": 1464, + "gro": 12691, + "grocer": 11633, + "groceries": 32409, + "grocery": 13826, + "grom": 45284, + "gron": 22345, + "groningen": 45639, + "groo": 9015, + "groom": 39883, + "groom": 22813, + "grooming": 25575, + "groot": 37708, + "groove": 39484, + "groove": 17680, + "grooves": 43954, + "groovy": 30143, + "gros": 26834, + "gros": 32639, + "gross": 31080, + "gross": 11541, + "grosven": 46911, + "grote": 47207, + "grotto": 45260, + "grou": 1582, + "groun": 45110, + "ground": 9558, + "ground": 2461, + "groundbreaking": 21006, + "grounded": 27799, + "grounds": 8454, + "groundwater": 39457, + "group": 19045, + "group": 1771, + "groupe": 47654, + "groups": 6776, + "grouse": 36327, + "grove": 31756, + "grove": 7463, + "grover": 31345, + "groves": 27306, + "grow": 3179, + "grow": 4559, + "grower": 44925, + "growers": 25689, + "growing": 28429, + "growing": 4425, + "growingup": 43433, + "growler": 47096, + "grown": 41762, + "grown": 7120, + "grows": 13352, + "growth": 17925, + 
"growth": 4026, + "growthhacking": 25963, + "grp": 27321, + "grt": 28557, + "gru": 5957, + "grub": 34019, + "grue": 42047, + "gruesome": 47111, + "grum": 45454, + "grump": 49015, + "grumpy": 23610, + "grun": 16203, + "grunge": 33745, + "gry": 16140, + "gry": 5364, + "gs": 25818, + "gs": 1345, + "gsa": 40433, + "gsc": 47751, + "gshore": 43392, + "gsm": 32181, + "gsp": 49173, + "gst": 22239, + "gt": 16151, + "gt": 4725, + "gta": 14826, + "gta": 15338, + "gtaonline": 27292, + "gtav": 27283, + "gti": 39954, + "gto": 39071, + "gtr": 33407, + "gts": 37338, + "gtx": 35230, + "gu": 700, + "gu": 12916, + "gua": 23751, + "guacam": 37477, + "guacamole": 40115, + "guad": 22966, + "guadal": 46097, + "guadalu": 36994, + "guadalupe": 38360, + "guam": 37325, + "guan": 44191, + "guan": 42406, + "guang": 27019, + "guangzhou": 37857, + "guar": 4119, + "guaran": 9242, + "guarantee": 17421, + "guaranteed": 14731, + "guarantees": 40154, + "guard": 30776, + "guard": 4901, + "guarded": 40602, + "guardi": 12008, + "guardia": 43628, + "guardian": 23713, + "guardian": 9498, + "guardians": 21479, + "guarding": 24966, + "guardiola": 32100, + "guards": 12810, + "guatem": 19423, + "guatemala": 21670, + "guay": 48591, + "guay": 24247, + "gubernat": 41400, + "gubernatorial": 41618, + "gucci": 16779, + "gud": 48061, + "gud": 22378, + "gue": 2030, + "gue": 2917, + "gued": 38893, + "guel": 23146, + "guelph": 27660, + "guer": 10391, + "guern": 29277, + "guernsey": 33982, + "guerra": 38215, + "guerrero": 31967, + "guerrilla": 36715, + "gues": 39971, + "gues": 12601, + "guess": 35506, + "guess": 3135, + "guessed": 28005, + "guesses": 30623, + "guessing": 21891, + "guest": 27349, + "guest": 3781, + "guests": 6212, + "guet": 36797, + "guetta": 45904, + "guez": 12313, + "gug": 31358, + "guggen": 35086, + "guggenheim": 37135, + "gui": 2587, + "gui": 25746, + "guid": 11437, + "guidance": 12508, + "guide": 21845, + "guide": 3555, + "guided": 13194, + "guidelines": 16591, + "guides": 14375, + "guiding": 22759, + "guido": 41818, + "guil": 5008, + "guild": 19755, + "guild": 16597, + "guildford": 34450, + "guildhall": 47224, + "guillau": 41123, + "guillaume": 45394, + "guiller": 33660, + "guillermo": 39524, + "guilt": 26354, + "guilty": 9761, + "guin": 13284, + "guin": 47863, + "guine": 13759, + "guinea": 18537, + "guinness": 16648, + "guire": 18209, + "guise": 42024, + "guit": 3759, + "guitar": 21746, + "guitar": 5084, + "guitarist": 13035, + "guitars": 15023, + "guj": 34935, + "gujar": 12698, + "gujarat": 14714, + "guk": 20280, + "gul": 5530, + "gul": 21350, + "gula": 27426, + "gular": 34969, + "gulf": 22101, + "gulf": 11279, + "gull": 48764, + "gull": 28778, + "gulls": 37501, + "gully": 46112, + "gum": 22041, + "gum": 11235, + "gumb": 40147, + "gumbo": 47126, + "gummy": 34276, + "gums": 46609, + "gun": 2748, + "gun": 3496, + "guna": 43333, + "gundam": 26087, + "gundy": 21162, + "gunman": 32743, + "gunmen": 44738, + "gunn": 27473, + "gunna": 24002, + "gunnar": 45301, + "gunner": 35285, + "gunners": 37788, + "guns": 7591, + "gunsense": 44781, + "gunshot": 49250, + "gunsn": 49028, + "gup": 38632, + "gup": 47335, + "gupta": 15905, + "gur": 3218, + "gur": 30224, + "gura": 46836, + "gurgaon": 33240, + "guri": 43888, + "gurl": 25445, + "gurmee": 35482, + "gurmeetramrahim": 36549, + "guru": 18629, + "guru": 10800, + "gurudev": 48647, + "gus": 8018, + "gust": 24629, + "gusta": 23024, + "gusta": 44196, + "gustav": 32062, + "gustav": 37921, + "gustave": 43170, + "gustavo": 45943, + "gusto": 37937, + "gusts": 20896, + "gusty": 27589, + "gut": 
24780, + "gut": 13486, + "guter": 44963, + "guterres": 48738, + "guth": 31696, + "guthrie": 33164, + "gutier": 32773, + "gutierrez": 33739, + "guts": 25983, + "gutted": 26524, + "gutter": 40537, + "guwa": 43063, + "guwahati": 45045, + "guy": 10008, + "guy": 2149, + "guyana": 45215, + "guyen": 28031, + "guys": 43588, + "guys": 1791, + "guyz": 48170, + "guzman": 37960, + "gv": 15462, + "gv": 17336, + "gw": 7172, + "gw": 15717, + "gwen": 32165, + "gwen": 24182, + "gwin": 43005, + "gwy": 32226, + "gwyne": 36923, + "gx": 40227, + "gy": 2168, + "gy": 1164, + "gya": 43214, + "gyan": 43814, + "gye": 21728, + "gyllen": 49348, + "gym": 9902, + "gym": 5222, + "gymna": 13517, + "gymnasium": 42847, + "gymnast": 42658, + "gymnastics": 20116, + "gyn": 39603, + "gyne": 45836, + "gyp": 40053, + "gypsy": 22354, + "gypt": 41921, + "gz": 45937, + "gz": 35841, + "gö": 40778, + "gü": 31907, + "h": 71, + "h": 327, + "ha": 560, + "ha": 1429, + "haa": 26814, + "haal": 35869, + "haan": 36284, + "haar": 45247, + "haar": 35859, + "haas": 27443, + "haasan": 26601, + "hab": 20573, + "hab": 20002, + "haban": 46225, + "haber": 44737, + "habit": 8491, + "habit": 17215, + "habitat": 11747, + "habitats": 35344, + "habits": 14540, + "habs": 27489, + "hac": 20343, + "hace": 43623, + "haci": 40674, + "hack": 6610, + "hack": 11182, + "hackathon": 25182, + "hacked": 19575, + "hacker": 22376, + "hackers": 21498, + "hacking": 12939, + "hackney": 48811, + "hackney": 24928, + "hacks": 19965, + "had": 10660, + "had": 1100, + "hadi": 39058, + "hadid": 26415, + "hadith": 46907, + "hadley": 44995, + "hadn": 21480, + "hadoop": 43868, + "hae": 30723, + "hae": 27193, + "hafi": 39914, + "hag": 26855, + "hag": 43207, + "hagan": 47489, + "hagen": 14664, + "hager": 48773, + "hagg": 26324, + "hague": 28988, + "hah": 18108, + "hah": 13680, + "haha": 1913, + "haha": 3060, + "hahah": 27253, + "hahah": 15441, + "hahaha": 4722, + "hahahah": 37513, + "hahahah": 20096, + "hahahaha": 8058, + "hahahaha": 9501, + "hahahahah": 33334, + "hahahahaha": 16347, + "hahahahahaha": 26487, + "hahahahahahaha": 43653, + "hahahahahahahaha": 36126, + "hahahha": 49205, + "hahn": 35596, + "hai": 8734, + "hai": 5234, + "haider": 42200, + "haiku": 19542, + "hail": 15272, + "hail": 8634, + "hailed": 44604, + "hailey": 27703, + "hailing": 47288, + "hails": 32571, + "hailstate": 35063, + "hain": 23861, + "hair": 4658, + "hair": 2225, + "haircare": 43682, + "haircut": 14711, + "hairdresser": 47468, + "haired": 27202, + "hairs": 27951, + "hairstyle": 22324, + "hairstyles": 40627, + "hairy": 26513, + "haiti": 17368, + "haitian": 37577, + "haj": 27885, + "haj": 43191, + "haji": 41889, + "hajj": 35576, + "hak": 25142, + "hak": 40671, + "haka": 44011, + "hake": 41663, + "hal": 1296, + "hal": 8708, + "hala": 25918, + "halal": 34216, + "halam": 29061, + "halamadrid": 31132, + "halder": 32201, + "hale": 37038, + "hale": 14701, + "halen": 39204, + "halep": 49017, + "haley": 37330, + "haley": 16839, + "half": 7453, + "half": 2349, + "halftime": 13742, + "halfway": 16736, + "hali": 9860, + "hali": 43030, + "halibut": 49030, + "halifax": 13411, + "hall": 6850, + "hall": 2140, + "halla": 29569, + "halle": 27763, + "halle": 32239, + "hallelujah": 36993, + "halli": 32665, + "hallmark": 31040, + "hallmark": 32053, + "hallmarkchannel": 36840, + "hallo": 3463, + "halloffame": 48578, + "halloween": 28537, + "halloween": 3739, + "halls": 18052, + "hallucin": 35385, + "hallway": 26845, + "halo": 33331, + "halo": 11918, + "halsey": 34256, + "halt": 25640, + "halter": 47194, + "halton": 45445, + 
"ham": 1522, + "ham": 1714, + "hama": 17944, + "hamas": 14818, + "hamburg": 18409, + "hamburger": 33928, + "hamid": 32377, + "hamil": 6725, + "hamill": 45784, + "hamill": 48729, + "hamillhimself": 47324, + "hamilton": 22448, + "hamilton": 7684, + "hamlet": 27722, + "hamlin": 49326, + "hamm": 46110, + "hammer": 15331, + "hammer": 9401, + "hammered": 37251, + "hammers": 35649, + "hammersmith": 42127, + "hammock": 33682, + "hammond": 21761, + "hamont": 18518, + "hamp": 6665, + "hamper": 27692, + "hampshire": 16006, + "hampstead": 37340, + "hampton": 36582, + "hampton": 12285, + "hamptons": 42415, + "hamr": 47979, + "hamradio": 36712, + "hams": 25619, + "hamster": 33313, + "hamstring": 39990, + "hamza": 45762, + "han": 1545, + "han": 3565, + "hana": 16801, + "hand": 1722, + "hand": 2463, + "handbag": 22654, + "handbags": 35667, + "handball": 27988, + "handbook": 25147, + "handcrafted": 22185, + "handed": 10881, + "handedly": 48656, + "handel": 40072, + "handful": 23725, + "handheld": 26812, + "handic": 17812, + "handicap": 27063, + "handicapp": 42349, + "handing": 19196, + "handle": 43681, + "handle": 7245, + "handled": 26824, + "handler": 29097, + "handles": 22124, + "handling": 14071, + "handmade": 18054, + "handmade": 6737, + "handmadehour": 25724, + "handover": 46922, + "hands": 3500, + "handshake": 38418, + "handsome": 7438, + "handwriting": 29986, + "handwritten": 35192, + "handy": 13479, + "hane": 28411, + "hang": 3351, + "hang": 5592, + "hangar": 33439, + "hanged": 40807, + "hanger": 28905, + "hangin": 22670, + "hanging": 4850, + "hangout": 17572, + "hangover": 20755, + "hangs": 21785, + "hani": 39944, + "hani": 18374, + "hank": 35993, + "hank": 17655, + "hanks": 29943, + "hanley": 47284, + "hann": 5584, + "hanna": 10075, + "hannah": 18622, + "hannah": 9142, + "hannel": 43477, + "hanni": 19493, + "hannibal": 25149, + "hannity": 24569, + "hannover": 39976, + "hanoi": 36134, + "hanover": 33246, + "hans": 35172, + "hans": 16628, + "hansen": 19729, + "hanson": 24602, + "hant": 40641, + "hanuk": 32774, + "hanukkah": 34247, + "hanuman": 46975, + "hao": 27184, + "hap": 44981, + "hap": 47988, + "happ": 784, + "happen": 21486, + "happen": 4506, + "happened": 4402, + "happening": 4284, + "happeningnow": 43107, + "happenings": 41998, + "happens": 4988, + "happier": 14118, + "happiest": 13811, + "happily": 17316, + "happiness": 5096, + "happy": 2952, + "happy": 900, + "happybirthday": 9651, + "happybirthday": 12207, + "happydays": 25106, + "happye": 33922, + "happyeaster": 38745, + "happyfathersday": 43534, + "happyfriday": 33340, + "happyhalloween": 28750, + "happyholidays": 32186, + "happyhour": 32036, + "happymonday": 47364, + "happymothersday": 42425, + "happynewyear": 18655, + "happythanksgiving": 40593, + "happyvalentinesday": 42403, + "haps": 9114, + "haq": 32445, + "har": 915, + "har": 5888, + "hara": 10367, + "haram": 35732, + "haram": 22950, + "haran": 27921, + "harare": 43562, + "haras": 26644, + "harass": 16481, + "harassed": 43067, + "harassment": 16641, + "harat": 28984, + "harb": 5856, + "harbaugh": 45220, + "harbor": 40686, + "harbor": 10202, + "harbour": 35430, + "harbour": 10011, + "harcourt": 48093, + "hard": 3312, + "hard": 1626, + "hardcover": 31123, + "harden": 27350, + "harder": 12274, + "hardest": 15258, + "hardin": 43802, + "harding": 24382, + "hardly": 17363, + "hardro": 28126, + "hardrock": 48365, + "hardrock": 40739, + "hards": 44048, + "hardship": 45085, + "hardt": 17922, + "hardware": 11957, + "hardwell": 45572, + "hardwick": 46864, + "hardwood": 28167, + "hardwork": 
42554, + "hardwork": 27404, + "hardworking": 28095, + "hardworkpaysoff": 49193, + "hardy": 48179, + "hardy": 14113, + "hare": 27903, + "hare": 18464, + "harga": 39738, + "hari": 25472, + "hari": 8981, + "harlan": 49133, + "harle": 29096, + "harlem": 17771, + "harley": 24702, + "harley": 13632, + "harleydavidson": 39183, + "harlow": 34113, + "harm": 16656, + "harm": 14452, + "harman": 42434, + "harmed": 39637, + "harmful": 21725, + "harmless": 44369, + "harmon": 10828, + "harmon": 28729, + "harmony": 10785, + "harms": 46703, + "harne": 43323, + "harness": 23205, + "harold": 16917, + "harp": 27339, + "harper": 31288, + "harper": 12634, + "harri": 6639, + "harrier": 37372, + "harriet": 27154, + "harrington": 34340, + "harris": 25356, + "harris": 6925, + "harrisburg": 40590, + "harrison": 34389, + "harrison": 10540, + "harro": 18939, + "harrogate": 30842, + "harrow": 38807, + "harry": 11094, + "harry": 3600, + "harrypotter": 23375, + "harsh": 30596, + "harsh": 16944, + "hart": 9335, + "hart": 7752, + "hartford": 23434, + "harth": 35619, + "hartle": 47482, + "hartley": 31268, + "hartman": 43294, + "haru": 35099, + "harvard": 28118, + "harvard": 12848, + "harve": 6405, + "harvest": 44495, + "harvest": 8971, + "harvested": 35899, + "harvesting": 26674, + "harvey": 33289, + "harvey": 9586, + "harvick": 46983, + "haryana": 27661, + "has": 13855, + "has": 791, + "hasan": 30049, + "hasbro": 37405, + "hash": 6338, + "hash": 19199, + "hashi": 41831, + "hashmi": 35852, + "hashtag": 34015, + "hashtag": 9238, + "hashtags": 23514, + "haskell": 48550, + "hasn": 9143, + "hass": 9298, + "hassan": 15829, + "hassee": 37117, + "hassel": 32204, + "hassle": 35762, + "hast": 18146, + "hasta": 36623, + "hastings": 22035, + "hat": 3447, + "hat": 3801, + "hatch": 24202, + "hatch": 17809, + "hatchback": 42348, + "hatched": 42158, + "hate": 23546, + "hate": 3753, + "hated": 21298, + "hateful": 36418, + "hater": 36917, + "haters": 14027, + "hates": 14957, + "hatfield": 38448, + "hath": 27894, + "hath": 34416, + "hathaway": 31801, + "hati": 26045, + "hating": 25668, + "hatred": 19046, + "hats": 9812, + "hatt": 8747, + "hatton": 44861, + "hau": 5152, + "hauer": 48751, + "haul": 23743, + "haul": 12332, + "hauled": 46620, + "hauling": 43132, + "haun": 9676, + "haunt": 31039, + "haunted": 14944, + "haunting": 24034, + "haunts": 48035, + "haus": 41755, + "haus": 16478, + "hausen": 33338, + "hauser": 46586, + "haute": 28854, + "hav": 13443, + "hav": 20447, + "havan": 36304, + "havana": 23357, + "havas": 46261, + "have": 18053, + "have": 720, + "haven": 33074, + "haven": 3871, + "havent": 29130, + "haver": 27876, + "haves": 49088, + "havin": 31937, + "having": 1977, + "havoc": 24447, + "haw": 2788, + "haw": 26954, + "hawa": 6067, + "hawa": 46278, + "hawai": 15800, + "hawaii": 32413, + "hawaii": 8265, + "hawaiian": 17734, + "hawan": 27765, + "hawk": 14704, + "hawk": 8218, + "hawke": 38178, + "hawker": 39051, + "hawkeye": 38666, + "hawkeyes": 34266, + "hawking": 33437, + "hawkins": 19740, + "hawks": 44806, + "hawks": 5841, + "hawthorn": 45372, + "hawthorne": 36730, + "hay": 4871, + "hay": 11367, + "haya": 41325, + "hayat": 49360, + "hayden": 19806, + "haydn": 48207, + "haye": 36583, + "hayes": 13555, + "hayley": 39986, + "hayley": 22204, + "haynes": 30496, + "hays": 41524, + "hayward": 29400, + "haz": 5040, + "haz": 39921, + "hazard": 26174, + "hazard": 15178, + "hazardous": 27102, + "hazards": 30639, + "haze": 22785, + "hazel": 19838, + "hazel": 21882, + "hazelnut": 35816, + "hazi": 22740, + "hazmat": 48887, + "hazrat": 45775, + 
"hazy": 32655, + "hb": 6854, + "hb": 12576, + "hbcu": 40008, + "hbd": 25277, + "hbd": 13594, + "hbo": 15252, + "hc": 15831, + "hc": 7821, + "hcs": 46850, + "hd": 11601, + "hd": 4414, + "hdd": 40508, + "hdmi": 33302, + "hdr": 28065, + "he": 651, + "he": 797, + "hea": 27150, + "hea": 32790, + "head": 1603, + "head": 1375, + "headache": 23849, + "headaches": 38025, + "headband": 28556, + "headed": 6153, + "header": 11077, + "heading": 4409, + "headless": 45219, + "headlights": 42422, + "headline": 10891, + "headliner": 38880, + "headlines": 14706, + "headlining": 26971, + "headphone": 37524, + "headphones": 14906, + "headquarters": 13041, + "heads": 5174, + "headset": 23883, + "headshot": 34890, + "heal": 1231, + "heal": 13833, + "healed": 31456, + "healer": 38328, + "healey": 38985, + "healing": 9295, + "heals": 32384, + "health": 2145, + "health": 1728, + "healthand": 43704, + "healthcare": 42500, + "healthcare": 6023, + "healthier": 18242, + "healthtech": 42694, + "healthy": 10330, + "healthy": 3782, + "healthye": 31532, + "healthyeating": 33761, + "healthyfood": 39996, + "healthylifestyle": 46254, + "healthyliving": 27293, + "healy": 34299, + "heap": 34781, + "heaps": 44446, + "hear": 2749, + "hear": 2584, + "heard": 4063, + "hearing": 46353, + "hearing": 5541, + "hearings": 33175, + "hearn": 36613, + "hears": 25395, + "heart": 4975, + "heart": 1936, + "heartbeat": 29154, + "heartbreak": 29281, + "heartbreaking": 21322, + "heartbroken": 35383, + "hearted": 21679, + "heartfelt": 22904, + "hearth": 31563, + "hearthstone": 34054, + "hearti": 29345, + "hearties": 44572, + "heartland": 31923, + "heartless": 47022, + "heartnews": 40426, + "hearts": 5516, + "heartw": 30002, + "heartwarming": 34080, + "hearty": 26994, + "heat": 12175, + "heat": 4403, + "heated": 17057, + "heater": 23246, + "heath": 12794, + "heath": 11719, + "heather": 20230, + "heather": 12470, + "heathrow": 24171, + "heating": 12478, + "heaton": 34557, + "heats": 36106, + "heatwave": 25726, + "heav": 2409, + "heaven": 15520, + "heaven": 5545, + "heavenly": 19117, + "heavens": 26026, + "heavier": 31253, + "heaviest": 33268, + "heavily": 14123, + "heavy": 12048, + "heavy": 4200, + "heavymetal": 39804, + "heavyweight": 17448, + "heb": 24700, + "heb": 34515, + "hebdo": 41817, + "hebrew": 27298, + "hebrides": 45121, + "hebron": 45725, + "hec": 18932, + "heck": 22985, + "heck": 14427, + "hectares": 44162, + "hectic": 37245, + "hector": 25852, + "hed": 18271, + "hedge": 16229, + "hedge": 20294, + "hedgehog": 21940, + "hedges": 41345, + "hee": 18364, + "hee": 15773, + "heechul": 42487, + "heed": 15118, + "heel": 33646, + "heel": 16861, + "heels": 10909, + "heem": 30061, + "heer": 40473, + "hef": 29473, + "heff": 48756, + "hefty": 48584, + "heg": 41995, + "heh": 25834, + "hehe": 48723, + "hehe": 10658, + "hehehe": 24138, + "hei": 6101, + "hei": 29051, + "heidel": 42927, + "heidelberg": 48445, + "heidi": 44860, + "heidi": 23867, + "heifer": 48219, + "heigh": 43883, + "height": 10788, + "heights": 8418, + "heim": 10931, + "heim": 9768, + "heimer": 39517, + "hein": 15487, + "hein": 43206, + "heine": 28742, + "heineken": 36874, + "heinrich": 47877, + "heinz": 32359, + "heir": 27083, + "heir": 34007, + "heirloom": 34232, + "heirs": 43834, + "heis": 21849, + "heisman": 34537, + "heist": 31035, + "heit": 37255, + "hel": 919, + "hel": 11579, + "hela": 48212, + "held": 4042, + "hele": 46129, + "helen": 17576, + "helen": 11291, + "helena": 23109, + "helene": 41591, + "helens": 45940, + "heli": 33874, + "heli": 40183, + "helicop": 10035, + 
"helicopter": 11956, + "helicopters": 26922, + "helium": 46505, + "helix": 35247, + "hell": 8410, + "hell": 4141, + "hella": 19800, + "hellboy": 48428, + "helle": 48600, + "helle": 46968, + "hellenic": 42544, + "heller": 44464, + "hello": 12887, + "hello": 3306, + "hells": 47989, + "helly": 48690, + "helm": 47970, + "helm": 19520, + "helmet": 11122, + "helmets": 21843, + "help": 8641, + "help": 1318, + "helped": 4845, + "helper": 29321, + "helpers": 36316, + "helpful": 12695, + "helping": 3875, + "helpless": 47638, + "helpline": 43101, + "helps": 5144, + "helsin": 17842, + "helsinki": 19626, + "hem": 20270, + "hem": 11148, + "hemi": 14256, + "hemi": 46856, + "heming": 30819, + "hemingway": 33470, + "hemisphere": 32767, + "hemmings": 34882, + "hemo": 43788, + "hemp": 28225, + "hemp": 18467, + "hems": 32451, + "hemsworth": 39428, + "hen": 2385, + "hen": 8047, + "hence": 23640, + "hend": 11560, + "hender": 49248, + "henderson": 14348, + "hendrick": 45296, + "hendricks": 37588, + "hendrix": 23605, + "henge": 33104, + "henley": 27853, + "henna": 39455, + "hennessy": 42667, + "henri": 19431, + "henri": 21610, + "henrik": 35772, + "henry": 16018, + "henry": 5508, + "hens": 31742, + "henson": 32935, + "hep": 17724, + "hep": 48791, + "hepat": 23767, + "hepatitis": 32169, + "hepburn": 26348, + "her": 1223, + "her": 899, + "hera": 38724, + "heral": 37809, + "herald": 27625, + "herald": 12851, + "herb": 26116, + "herb": 15302, + "herbal": 21868, + "herbali": 44087, + "herbalife": 48364, + "herbert": 19935, + "herbs": 17320, + "hercules": 26539, + "herd": 36142, + "herd": 18589, + "here": 9134, + "here": 763, + "hered": 47976, + "hereford": 35543, + "heres": 13566, + "hereto": 47673, + "heri": 31392, + "herit": 4720, + "heritag": 38273, + "heritage": 20962, + "heritage": 5455, + "herman": 31890, + "herman": 21568, + "hermann": 40942, + "hermes": 34563, + "hermi": 35265, + "hermione": 45502, + "hermit": 43953, + "hermitage": 47706, + "hermo": 40967, + "hermosa": 42531, + "hern": 30571, + "hern": 43576, + "hernandez": 17707, + "hero": 7338, + "hero": 3756, + "heroes": 38010, + "heroes": 5506, + "heroic": 24255, + "heroin": 23841, + "heroine": 27420, + "heron": 22593, + "heros": 37642, + "herr": 38537, + "herrera": 27755, + "herring": 30211, + "hers": 25359, + "herself": 9207, + "hersh": 20379, + "hershey": 29734, + "hert": 26744, + "hertfordshire": 41070, + "herts": 35784, + "herty": 23454, + "hertz": 49383, + "hes": 30553, + "hes": 12784, + "hesit": 23933, + "hesitate": 34967, + "hess": 41888, + "hester": 31105, + "het": 37527, + "het": 19678, + "hetero": 26405, + "heu": 20105, + "heughan": 32298, + "hew": 48141, + "hew": 43051, + "hewitt": 28871, + "hex": 16255, + "hex": 31241, + "hey": 10759, + "hey": 2189, + "hez": 34591, + "hezbollah": 37636, + "hf": 26606, + "hf": 20603, + "hfx": 47297, + "hg": 23986, + "hg": 26237, + "hgtv": 47657, + "hh": 3280, + "hh": 5180, + "hhh": 8281, + "hhhh": 19391, + "hhhh": 13121, + "hhhhh": 24246, + "hhhhhh": 37278, + "hhs": 27006, + "hi": 677, + "hi": 1883, + "hia": 20672, + "hiatus": 27823, + "hib": 15922, + "hiber": 38799, + "hibis": 36226, + "hibiscus": 36460, + "hibition": 24658, + "hibs": 42814, + "hic": 3549, + "hic": 38079, + "hick": 14813, + "hickman": 49148, + "hickory": 29905, + "hicks": 23429, + "hid": 15552, + "hid": 14451, + "hidalgo": 47464, + "hidden": 28305, + "hidden": 7029, + "hiddleston": 31444, + "hide": 17725, + "hide": 9379, + "hideous": 46588, + "hides": 30800, + "hiding": 11371, + "hie": 15763, + "hier": 23433, + "hier": 29913, + "hierarchy": 
44442, + "hifi": 38168, + "hig": 38108, + "higgins": 21783, + "high": 1487, + "high": 1400, + "higher": 5321, + "highered": 27072, + "highest": 5317, + "highland": 32244, + "highland": 16062, + "highlander": 46251, + "highlanders": 40445, + "highlands": 16883, + "highlight": 8264, + "highlighted": 22252, + "highlighter": 45460, + "highlighting": 17344, + "highlights": 6173, + "highly": 5302, + "highness": 38694, + "highs": 15144, + "highschool": 23102, + "highway": 45344, + "highway": 7620, + "highways": 28007, + "higu": 39115, + "hihi": 36240, + "hii": 42315, + "hijab": 31407, + "hika": 41356, + "hikari": 44624, + "hike": 9404, + "hiked": 36471, + "hiker": 40947, + "hikers": 46090, + "hikes": 27076, + "hiking": 9118, + "hiko": 48708, + "hil": 3508, + "hil": 17927, + "hila": 38837, + "hilar": 37337, + "hilari": 7784, + "hilarious": 8358, + "hilariously": 43476, + "hilary": 45898, + "hilary": 25415, + "hilde": 45382, + "hill": 3671, + "hill": 2682, + "hillary": 13257, + "hillary": 7074, + "hillaryclinton": 15357, + "hilli": 32513, + "hills": 24178, + "hills": 5289, + "hillsborough": 32157, + "hillside": 37194, + "hilltop": 45858, + "hilly": 32483, + "hilton": 33621, + "hilton": 14012, + "him": 4128, + "him": 1269, + "himach": 29132, + "himachal": 35461, + "himalay": 17552, + "himalayan": 30318, + "himalayas": 32872, + "hime": 45892, + "himself": 4530, + "himss": 41730, + "hin": 1676, + "hin": 37930, + "hina": 40571, + "hinakhan": 45518, + "hinch": 49320, + "hind": 34460, + "hind": 23293, + "hindi": 14967, + "hinds": 47859, + "hindu": 17587, + "hindu": 12053, + "hinduism": 40592, + "hindus": 25701, + "hindustan": 46553, + "hines": 37462, + "hing": 37968, + "hini": 33564, + "hino": 45343, + "hint": 11868, + "hinton": 47165, + "hints": 20594, + "hio": 32897, + "hip": 11725, + "hip": 6584, + "hipho": 8819, + "hiphop": 26598, + "hiphop": 10914, + "hipp": 13607, + "hippie": 28637, + "hippo": 28398, + "hippo": 36729, + "hips": 30191, + "hipstamatic": 31002, + "hipster": 19987, + "hipsters": 48265, + "hir": 4959, + "hir": 14728, + "hira": 42577, + "hire": 32356, + "hire": 8243, + "hired": 17602, + "hires": 24133, + "hiring": 7835, + "hiro": 17396, + "hiro": 20588, + "hiroshima": 33867, + "hirsch": 46967, + "his": 15211, + "his": 787, + "hism": 23502, + "hispan": 16843, + "hispanic": 22676, + "hist": 21710, + "hist": 13779, + "histo": 33479, + "histor": 2993, + "historia": 46010, + "historian": 20697, + "historians": 35200, + "historic": 30195, + "historic": 5726, + "historical": 34154, + "historical": 8039, + "historically": 30445, + "histories": 34736, + "history": 11142, + "history": 1695, + "historymonth": 19356, + "historyof": 35905, + "hit": 5453, + "hit": 2341, + "hitch": 22937, + "hitch": 36203, + "hitler": 16518, + "hitman": 33290, + "hits": 4712, + "hitter": 23538, + "hitters": 39724, + "hitting": 7957, + "hiv": 44410, + "hiv": 11018, + "hive": 38162, + "hive": 18521, + "hiya": 42393, + "hk": 22648, + "hk": 12307, + "hl": 8297, + "hl": 5956, + "hle": 32389, + "hler": 35418, + "hm": 17913, + "hm": 7631, + "hmm": 13725, + "hmmm": 17032, + "hmmmm": 34598, + "hms": 14625, + "hmu": 21630, + "hmv": 49288, + "hn": 22905, + "hn": 7478, + "hns": 48412, + "ho": 606, + "ho": 2971, + "hoa": 37517, + "hoar": 31628, + "hoax": 33438, + "hob": 18212, + "hobart": 31646, + "hobb": 16175, + "hobbies": 36370, + "hobbit": 23207, + "hobbs": 34343, + "hobby": 41120, + "hobby": 17557, + "hobo": 34613, + "hobo": 41334, + "hoboken": 41568, + "hoc": 35880, + "hoch": 43772, + "hock": 34914, + "hock": 46574, + 
"hockey": 16499, + "hockey": 4111, + "hoco": 34771, + "hod": 31062, + "hodg": 23660, + "hodge": 40585, + "hodges": 35061, + "hodgson": 37044, + "hoe": 32502, + "hoe": 11262, + "hoek": 40073, + "hoes": 21164, + "hof": 20186, + "hof": 12789, + "hofer": 38654, + "hoff": 32860, + "hoff": 22751, + "hofficial": 41949, + "hoffman": 22026, + "hog": 12075, + "hog": 13255, + "hogan": 19757, + "hogg": 42005, + "hogs": 23242, + "hogwarts": 29168, + "hoh": 43947, + "hoi": 39295, + "hok": 26942, + "hok": 47167, + "hokies": 35168, + "hokkaido": 49145, + "hol": 1187, + "hol": 7349, + "hola": 28724, + "hold": 36496, + "hold": 3254, + "holden": 21869, + "holder": 7862, + "holders": 10074, + "holding": 5050, + "holdings": 24832, + "holds": 7286, + "hole": 47242, + "hole": 5341, + "holes": 11266, + "holi": 2093, + "holi": 21926, + "holic": 16348, + "holics": 29782, + "holiday": 13168, + "holiday": 2878, + "holidays": 5372, + "holiness": 37259, + "holistic": 26300, + "holl": 27699, + "holla": 26500, + "holland": 31608, + "holland": 9978, + "hollande": 47690, + "holler": 49047, + "holli": 24019, + "holliday": 41624, + "hollow": 41221, + "hollow": 16691, + "holloway": 29435, + "holly": 12731, + "holly": 11923, + "hollyo": 41525, + "hollyoaks": 43352, + "hollywood": 24655, + "hollywood": 5518, + "holm": 34758, + "holm": 12739, + "holme": 46149, + "holmes": 12756, + "holo": 10317, + "holocau": 14688, + "holocaust": 16476, + "hols": 33344, + "holt": 18868, + "holtz": 44743, + "holy": 13910, + "holy": 4874, + "hom": 906, + "hom": 47397, + "homa": 9557, + "homage": 17746, + "home": 2143, + "home": 1137, + "homebrew": 35046, + "homec": 33869, + "homecoming": 9008, + "homedecor": 15695, + "homedepot": 38707, + "homegrown": 32554, + "homeitems": 42972, + "homeland": 21633, + "homeless": 18403, + "homeless": 9661, + "homelessness": 19851, + "homemade": 7889, + "homeof": 48856, + "homeowner": 37267, + "homeowners": 29882, + "homepage": 29828, + "homer": 29307, + "homer": 16931, + "homers": 38333, + "homes": 19480, + "homes": 5416, + "homeschool": 40994, + "homestead": 32609, + "homeswee": 46298, + "hometown": 12238, + "homework": 12495, + "homicide": 21520, + "homie": 12540, + "homies": 18893, + "homme": 26193, + "homo": 18129, + "homo": 30504, + "homophobia": 37875, + "homophobic": 40975, + "homosexual": 44288, + "homosexuality": 46720, + "homs": 45413, + "hon": 1279, + "hon": 10296, + "honda": 8553, + "honduras": 29715, + "hone": 38640, + "honest": 7814, + "honest": 9602, + "honestly": 9155, + "honesty": 24939, + "honey": 9843, + "honey": 6406, + "honeycomb": 48583, + "honeymoon": 22527, + "hong": 12144, + "hong": 8598, + "hongkong": 16659, + "honi": 17918, + "honolulu": 28096, + "honor": 9206, + "honor": 3402, + "honorable": 19498, + "honorary": 15675, + "honore": 25868, + "honored": 5494, + "honoree": 38993, + "honorees": 43012, + "honoring": 10771, + "honors": 10248, + "honour": 8240, + "honourable": 29855, + "honoured": 11945, + "honouring": 37754, + "honours": 22558, + "hoo": 2300, + "hoo": 7920, + "hood": 18681, + "hood": 3222, + "hooded": 33631, + "hoodie": 13444, + "hoodies": 25974, + "hoods": 16664, + "hoof": 44555, + "hook": 30488, + "hook": 10395, + "hookah": 34214, + "hooked": 18138, + "hookem": 31465, + "hooker": 37891, + "hooking": 35240, + "hooks": 25068, + "hooligans": 48176, + "hoon": 21368, + "hooo": 44538, + "hoop": 31516, + "hoop": 19573, + "hooper": 35221, + "hoops": 9351, + "hoor": 22155, + "hooray": 24940, + "hoos": 46462, + "hoosier": 48886, + "hoosiers": 42780, + "hoot": 29164, + "hoover": 
25691, + "hop": 10848, + "hop": 5833, + "hope": 5263, + "hope": 1683, + "hoped": 30628, + "hopeful": 21453, + "hopefully": 7602, + "hopeless": 35586, + "hopes": 10018, + "hoping": 7207, + "hopkins": 17821, + "hopp": 48839, + "hopped": 34220, + "hopper": 21748, + "hopping": 27606, + "hoppy": 38359, + "hops": 21137, + "hor": 1407, + "hor": 33847, + "hora": 26013, + "horace": 39282, + "horan": 26857, + "horde": 44947, + "hore": 15380, + "horiz": 8144, + "horizon": 17924, + "horizon": 11920, + "horizons": 29685, + "horizontal": 25775, + "hormon": 27096, + "hormone": 31283, + "hormones": 35162, + "horn": 15771, + "horn": 9607, + "horne": 38143, + "horned": 34526, + "hornet": 28739, + "hornets": 20124, + "horns": 22109, + "horny": 32622, + "horo": 21500, + "horoscope": 38453, + "horowitz": 44669, + "horri": 8656, + "horrible": 13726, + "horribly": 45484, + "horrific": 25314, + "horrifying": 38901, + "horror": 13787, + "horror": 5032, + "horrormovies": 46682, + "horrors": 33321, + "horse": 8562, + "horse": 4558, + "horseback": 43673, + "horseman": 48885, + "horsepower": 36882, + "horser": 23096, + "horseracing": 30693, + "horses": 8809, + "horseshoe": 29242, + "horst": 37182, + "hort": 19482, + "horticul": 27141, + "horticulture": 39998, + "horton": 25945, + "hortons": 38422, + "horus": 29794, + "hos": 44320, + "hos": 25008, + "hosa": 44618, + "hose": 19662, + "hoseok": 38817, + "hosp": 2847, + "hosp": 37853, + "hospice": 20533, + "hospit": 7180, + "hospital": 29399, + "hospital": 3851, + "hospitality": 11657, + "hospitalized": 36915, + "hospitals": 13816, + "host": 17403, + "host": 3953, + "hostage": 26119, + "hoste": 31700, + "hosted": 6017, + "hostel": 27225, + "hostess": 39692, + "hostile": 28074, + "hosting": 4857, + "hosts": 8718, + "hot": 2851, + "hot": 2069, + "hota": 43289, + "hotdog": 43758, + "hotel": 14591, + "hotel": 2738, + "hotels": 8654, + "hotline": 30516, + "hotmail": 46427, + "hotness": 39803, + "hotra": 27109, + "hotro": 47823, + "hotspot": 36606, + "hotspur": 35176, + "hotter": 23591, + "hottest": 8279, + "hottie": 22804, + "hotties": 46027, + "hou": 1011, + "hou": 10122, + "hough": 44529, + "houghton": 36133, + "houn": 39273, + "houn": 33607, + "hound": 33996, + "hound": 13561, + "hounds": 21178, + "hounews": 48373, + "hour": 14930, + "hour": 2232, + "hourly": 30918, + "hours": 2382, + "house": 4107, + "house": 1212, + "housed": 37518, + "household": 12412, + "households": 27167, + "housel": 48685, + "housemusic": 28468, + "houseof": 19928, + "houses": 7791, + "housewives": 38523, + "housing": 32924, + "housing": 5734, + "houston": 16564, + "houston": 5663, + "hov": 40291, + "hove": 29674, + "hoven": 35559, + "hover": 36252, + "hover": 49016, + "hovering": 43437, + "how": 7470, + "how": 829, + "howar": 37672, + "howard": 25447, + "howard": 7632, + "howdy": 42216, + "howe": 8179, + "howe": 24614, + "howell": 25297, + "hower": 32920, + "however": 8467, + "howi": 47883, + "howie": 42939, + "howl": 40332, + "howling": 41771, + "howto": 38191, + "howto": 44060, + "hoy": 39625, + "hoy": 13278, + "hoya": 40978, + "hp": 23753, + "hp": 6371, + "hpa": 30983, + "hpc": 39936, + "hpe": 33787, + "hpv": 45765, + "hq": 33571, + "hq": 4693, + "hr": 4810, + "hr": 4086, + "hra": 21320, + "hra": 17212, + "hrc": 18139, + "hrh": 29103, + "hri": 21068, + "hrithik": 45371, + "hrs": 7157, + "hru": 24127, + "hrw": 25064, + "hs": 9343, + "hs": 2466, + "hsbc": 31508, + "hsc": 43510, + "hse": 34057, + "hsfb": 29539, + "hsv": 47311, + "ht": 11123, + "ht": 7801, + "hta": 23452, + "hta": 49384, + "htafc": 
42821, + "htc": 48942, + "htc": 17635, + "html": 18231, + "hts": 43710, + "htt": 10620, + "http": 15066, + "https": 30901, + "httr": 49372, + "httweets": 43198, + "hu": 845, + "hu": 5949, + "hua": 22138, + "huan": 41405, + "huang": 32013, + "huar": 46916, + "huawe": 17709, + "huawei": 21128, + "hub": 18775, + "hub": 7028, + "hubb": 23183, + "hubbard": 33288, + "hubble": 30421, + "hubby": 16947, + "hubert": 40699, + "hubs": 29327, + "huck": 22909, + "huckabee": 43666, + "hud": 7169, + "hud": 28563, + "hudder": 22629, + "huddersfield": 24220, + "huddle": 33435, + "hudson": 25873, + "hudson": 11260, + "hue": 48380, + "hue": 21465, + "hues": 38003, + "huey": 39663, + "huff": 18746, + "huff": 44999, + "huffpost": 45887, + "hug": 40790, + "hug": 10359, + "huge": 2699, + "hugely": 24648, + "hugged": 41333, + "hugging": 27058, + "hugh": 8723, + "hugh": 15385, + "hughes": 11418, + "hugo": 43935, + "hugo": 17132, + "hugs": 14248, + "huh": 13348, + "huhu": 32134, + "hui": 29978, + "hul": 7911, + "hula": 40145, + "hulk": 17637, + "hull": 25154, + "hull": 10375, + "hulu": 24666, + "hum": 5823, + "hum": 16283, + "human": 3175, + "human": 2751, + "humane": 20220, + "humanitarian": 14170, + "humanities": 24949, + "humanity": 9420, + "humanright": 44385, + "humanrights": 14148, + "humans": 8324, + "humb": 9988, + "humber": 30602, + "humber": 38063, + "humble": 38703, + "humble": 10889, + "humbled": 19682, + "humbling": 39757, + "humbold": 24739, + "humboldt": 31389, + "hume": 38197, + "humid": 14778, + "humid": 27447, + "humidi": 47666, + "humidity": 15469, + "humil": 27205, + "humili": 25332, + "humility": 28535, + "humming": 26515, + "hummingbird": 33072, + "hummus": 31785, + "humor": 29369, + "humor": 11186, + "humorous": 38173, + "humour": 19161, + "hump": 16673, + "hump": 24529, + "humpback": 47662, + "humpday": 27693, + "humph": 19767, + "humphrey": 31549, + "hun": 1616, + "hun": 10795, + "hundre": 8505, + "hundred": 11898, + "hundreds": 8879, + "hung": 13825, + "hungar": 19420, + "hungarian": 23325, + "hungary": 17232, + "hunger": 25565, + "hunger": 10184, + "hungergames": 47507, + "hungover": 41110, + "hungry": 44845, + "hungry": 8451, + "hunk": 33912, + "hunt": 16498, + "hunt": 5774, + "hunted": 37373, + "hunter": 16531, + "hunter": 6099, + "hunters": 16115, + "hunting": 27830, + "hunting": 7507, + "huntington": 23521, + "hunts": 34041, + "huntsville": 34544, + "hur": 2305, + "hur": 34523, + "hurd": 44915, + "hurdle": 27486, + "hurdles": 25440, + "huri": 42486, + "hurley": 30166, + "hurling": 24738, + "huron": 36147, + "hurrah": 40599, + "hurric": 6543, + "hurrican": 36105, + "hurricane": 24051, + "hurricane": 8782, + "hurricanes": 22357, + "hurry": 10921, + "hurst": 44742, + "hurst": 11760, + "hurt": 7413, + "hurting": 24017, + "hurts": 13059, + "hus": 5111, + "hus": 35853, + "husband": 6179, + "husbands": 33612, + "hush": 28728, + "husk": 19246, + "huskers": 26946, + "huskies": 20988, + "husky": 20421, + "huss": 13733, + "hussain": 17940, + "hussein": 31336, + "hust": 27279, + "hustle": 15709, + "huston": 46480, + "hut": 20924, + "hut": 16503, + "hutch": 31018, + "hutch": 33203, + "hutchinson": 35721, + "hutto": 27662, + "hutton": 38321, + "hv": 17209, + "hv": 18593, + "hvac": 27492, + "hw": 27491, + "hw": 18876, + "hwa": 32352, + "hwan": 44390, + "hwang": 46775, + "hwy": 13812, + "hy": 1441, + "hy": 17827, + "hya": 31600, + "hyacin": 47263, + "hyatt": 44856, + "hyatt": 25146, + "hybri": 9084, + "hybrid": 10156, + "hyd": 42382, + "hyde": 46484, + "hyde": 16343, + "hyder": 13960, + "hyderabad": 
14801, + "hydr": 8031, + "hydra": 44414, + "hydra": 40420, + "hydrange": 43298, + "hydrate": 29628, + "hydrated": 23300, + "hydrating": 47653, + "hydration": 24174, + "hydrau": 26017, + "hydraulic": 26189, + "hydro": 8368, + "hydro": 22595, + "hydrogen": 20974, + "hye": 32724, + "hye": 25792, + "hygi": 16277, + "hygiene": 19591, + "hymn": 41350, + "hyo": 38960, + "hyo": 35078, + "hyp": 16964, + "hype": 30353, + "hype": 11111, + "hyped": 22507, + "hyper": 7997, + "hyper": 22146, + "hypertension": 40698, + "hypno": 23355, + "hypnosis": 48138, + "hypnoti": 40440, + "hypo": 10252, + "hypocr": 30711, + "hypocri": 25606, + "hypocrisy": 26296, + "hypocrite": 44125, + "hypothe": 46966, + "hypothesis": 44956, + "hyster": 24235, + "hysteria": 45965, + "hysterical": 48627, + "hyuk": 20452, + "hyun": 11831, + "hyun": 8589, + "hyundai": 17094, + "hyung": 46901, + "hyung": 16551, + "hz": 32533, + "i": 72, + "i": 328, + "ia": 12486, + "ia": 1073, + "iac": 32838, + "iac": 44063, + "iaf": 40789, + "iah": 35052, + "iain": 30103, + "ial": 11530, + "ial": 1974, + "ials": 20940, + "iam": 3579, + "iam": 11415, + "iambic": 43668, + "iambicpent": 43891, + "iamsrk": 15103, + "ian": 7723, + "ian": 1800, + "ians": 6451, + "iansomerhalder": 47077, + "iart": 18413, + "iartg": 18669, + "ias": 32303, + "ias": 14620, + "ib": 3962, + "ib": 13554, + "iba": 39763, + "ibadan": 44691, + "iban": 47145, + "ibc": 49014, + "ibd": 40732, + "iber": 23814, + "ibi": 12337, + "ibis": 47048, + "ibiza": 13853, + "ible": 37792, + "ibles": 44102, + "ibm": 23415, + "ibm": 13918, + "ibn": 25729, + "ibooks": 46887, + "ibra": 15476, + "ibrahi": 40350, + "ibrahim": 20816, + "ibrox": 46883, + "ibs": 41993, + "ibu": 43587, + "ibu": 46117, + "ic": 535, + "ic": 1029, + "ica": 2576, + "icago": 37492, + "ical": 6082, + "ical": 1110, + "ically": 3161, + "icals": 13999, + "ican": 17653, + "ican": 5246, + "icans": 20511, + "icar": 37211, + "ication": 21629, + "icc": 12945, + "ice": 2739, + "ice": 733, + "iceberg": 33662, + "icec": 13636, + "icecream": 21334, + "iced": 8049, + "icelan": 34114, + "iceland": 46716, + "iceland": 11935, + "icelandic": 34705, + "ices": 1931, + "ich": 5333, + "ich": 1232, + "icha": 31453, + "iche": 28972, + "iche": 21143, + "ichi": 21669, + "ichi": 14647, + "ichick": 45022, + "ichiro": 43787, + "ici": 948, + "ici": 22189, + "icia": 11774, + "icial": 17543, + "icial": 6397, + "ician": 40522, + "ician": 5374, + "icians": 6264, + "iciary": 21329, + "icic": 46006, + "icide": 6558, + "icides": 28253, + "icing": 7676, + "icio": 24207, + "icion": 45905, + "icious": 3325, + "icist": 21165, + "icists": 42171, + "icity": 7243, + "ick": 1168, + "ick": 1068, + "icked": 39799, + "icker": 40357, + "ickers": 30701, + "icki": 35468, + "icking": 6619, + "icks": 3727, + "icky": 11587, + "icn": 44516, + "ico": 13697, + "ico": 3040, + "icom": 17693, + "icom": 29796, + "icon": 13843, + "icon": 5646, + "iconic": 6959, + "icons": 15553, + "icop": 9389, + "icos": 32002, + "ics": 1324, + "ict": 6349, + "icted": 36515, + "iction": 40560, + "icton": 36548, + "icu": 45118, + "icu": 30443, + "icular": 40660, + "icus": 31459, + "icy": 28780, + "icy": 3495, + "icymi": 5315, + "icz": 46387, + "id": 1568, + "id": 1014, + "ida": 11032, + "ida": 11600, + "idad": 22462, + "idaho": 48817, + "idaho": 15165, + "idal": 39684, + "idan": 17929, + "idc": 22386, + "ide": 1909, + "ide": 14104, + "idea": 3612, + "ideal": 8789, + "ideally": 48247, + "ideals": 45096, + "ideas": 4452, + "ident": 7113, + "identi": 6009, + "identical": 25587, + "identification": 23337, + 
"identified": 15217, + "identifies": 35712, + "identify": 10949, + "identifying": 23589, + "identities": 34292, + "identity": 8892, + "ideology": 25840, + "iders": 8980, + "ides": 31791, + "idf": 28987, + "idge": 35567, + "idh": 44325, + "idi": 9611, + "idi": 14264, + "idio": 15994, + "idiot": 14087, + "idiots": 20856, + "idk": 8972, + "idle": 34754, + "idlib": 36199, + "ido": 6763, + "ido": 29641, + "idol": 24866, + "idol": 8884, + "idols": 21398, + "idr": 10106, + "idri": 46435, + "idris": 41312, + "ids": 6111, + "idu": 28655, + "idy": 33058, + "idyl": 44879, + "idyllic": 46632, + "ie": 6789, + "ie": 1718, + "iec": 44773, + "ied": 10059, + "ieee": 39860, + "iel": 27875, + "iel": 22729, + "ience": 1542, + "ient": 13115, + "ier": 33173, + "ier": 5912, + "iers": 45060, + "ies": 27912, + "ies": 963, + "iest": 10818, + "if": 8063, + "if": 878, + "ifa": 37574, + "ifc": 36524, + "ife": 41172, + "ife": 19590, + "iff": 35753, + "ification": 35755, + "ified": 41403, + "ift": 31143, + "iftar": 35153, + "ifu": 41523, + "ify": 32807, + "ig": 1089, + "ig": 3072, + "iga": 16493, + "igan": 27468, + "igans": 25419, + "igbo": 44591, + "ige": 10806, + "igen": 33070, + "iger": 30758, + "iger": 20685, + "igers": 40755, + "igers": 48928, + "iggy": 46219, + "iggy": 27604, + "igh": 2712, + "igh": 5451, + "ight": 14571, + "ight": 897, + "ighton": 35292, + "igi": 21901, + "igle": 29912, + "iglesias": 39432, + "ign": 7303, + "ign": 2326, + "ignati": 37573, + "ignatius": 48318, + "igne": 45843, + "ignite": 25210, + "ignition": 36115, + "igno": 15375, + "ignor": 7653, + "ignorance": 22735, + "ignorant": 26933, + "ignore": 12304, + "ignored": 20428, + "ignores": 40129, + "ignoring": 23969, + "igor": 33024, + "igs": 31344, + "igu": 21279, + "ih": 12162, + "ih": 34135, + "ihear": 13043, + "iheart": 30332, + "iheartawards": 18811, + "iheartradio": 25934, + "ihop": 45511, + "ihri": 39108, + "ihrithik": 39326, + "ii": 5103, + "ii": 2329, + "iii": 46236, + "iii": 6572, + "iiii": 20133, + "iiii": 45393, + "iiot": 30704, + "iit": 39330, + "iit": 33238, + "ij": 7337, + "ija": 42802, + "ik": 3903, + "ik": 10177, + "ika": 18188, + "ike": 12329, + "ike": 19696, + "ikea": 20528, + "iker": 38653, + "ikh": 44655, + "ikh": 12758, + "iklan": 32028, + "iklan": 29584, + "iko": 35659, + "iko": 39272, + "ikon": 38543, + "ikon": 19156, + "iku": 17780, + "il": 543, + "il": 958, + "ila": 4344, + "ilah": 32211, + "ilan": 13889, + "ilan": 28076, + "iland": 20957, + "ilation": 16180, + "ilay": 45093, + "ild": 22278, + "ild": 17164, + "ile": 18398, + "ile": 989, + "iled": 3358, + "iler": 22446, + "iler": 3615, + "ilers": 8975, + "iles": 42274, + "ili": 2076, + "ili": 19601, + "ilia": 14855, + "ilian": 10272, + "iliary": 32585, + "ilife": 42835, + "ilike": 44989, + "ilinan": 48497, + "iling": 3299, + "ilio": 47256, + "ilion": 12561, + "ilis": 43442, + "ilit": 11178, + "ilities": 5446, + "ility": 1787, + "ilive": 26478, + "ill": 828, + "ill": 660, + "illa": 8877, + "illa": 3043, + "illac": 17218, + "illage": 48922, + "illard": 21920, + "illary": 33667, + "illas": 23404, + "ille": 18213, + "ille": 5559, + "illed": 2527, + "illeg": 35808, + "illegal": 7983, + "illegally": 24466, + "illegals": 40490, + "iller": 23341, + "iller": 2956, + "illers": 30547, + "illery": 14514, + "illes": 20037, + "illi": 1086, + "illi": 25187, + "illia": 48776, + "illiams": 30301, + "illian": 48775, + "illian": 17355, + "illic": 37152, + "illicit": 40998, + "illie": 26083, + "illin": 35868, + "illing": 2803, + "illini": 28957, + "illino": 8920, + "illinois": 9414, + 
"illion": 35542, + "illion": 2035, + "illness": 11145, + "illnesses": 33861, + "illo": 34153, + "illo": 7588, + "illon": 20516, + "ills": 1900, + "illu": 3025, + "illumin": 11446, + "illuminate": 43261, + "illuminated": 28814, + "illuminati": 34551, + "illuminating": 46601, + "illumination": 43680, + "illus": 41386, + "illusion": 20318, + "illusions": 47429, + "illustr": 6268, + "illustrate": 37468, + "illustrated": 13151, + "illustrates": 38129, + "illustrating": 43322, + "illustration": 6052, + "illustrations": 17852, + "illustrator": 16649, + "illustri": 43116, + "illustrious": 44304, + "illy": 11707, + "illy": 9532, + "ilm": 36326, + "ilo": 4220, + "ilo": 14835, + "ilove": 7183, + "ilove": 32914, + "iloveart": 41114, + "ilovemy": 28863, + "iloveyou": 28829, + "ils": 1543, + "ilt": 25334, + "ilton": 28494, + "ilu": 27337, + "ilwx": 43777, + "ily": 4881, + "ily": 1026, + "ilya": 33377, + "ilysm": 29228, + "im": 732, + "im": 1496, + "ima": 2414, + "ima": 6432, + "imac": 40675, + "imacele": 47281, + "imag": 2316, + "image": 24101, + "image": 2867, + "imagery": 22828, + "images": 4952, + "imagin": 18178, + "imaginary": 30417, + "imagination": 13783, + "imaginative": 47233, + "imagine": 35752, + "imagine": 4826, + "imagined": 18478, + "imagines": 47379, + "imaging": 14231, + "imagining": 27384, + "imam": 37552, + "imam": 19024, + "iman": 45684, + "iman": 16247, + "imation": 44566, + "imax": 32066, + "imc": 45616, + "imdanielpadilla": 36357, + "imdb": 30407, + "ime": 44937, + "ime": 31151, + "imel": 31594, + "iment": 37157, + "imer": 21802, + "imes": 47744, + "imf": 28403, + "img": 24157, + "imi": 23559, + "imin": 23942, + "imit": 23462, + "imitation": 41630, + "imma": 19487, + "immac": 25085, + "immaculate": 29649, + "immature": 45531, + "immedi": 7366, + "immediate": 14440, + "immediately": 10108, + "immen": 17278, + "immense": 22722, + "immensely": 35013, + "immer": 13954, + "immerse": 46240, + "immersion": 31861, + "immersive": 27521, + "immigr": 5851, + "immigrant": 16474, + "immigrants": 14460, + "immigration": 9588, + "imminent": 27299, + "immort": 39244, + "immortal": 24717, + "immun": 8961, + "immune": 15606, + "immuni": 44571, + "immunity": 26254, + "immuno": 24361, + "immunology": 44483, + "immunotherapy": 39185, + "imo": 26349, + "imo": 13738, + "imp": 3335, + "imp": 31037, + "impac": 7573, + "impact": 33036, + "impact": 3844, + "impacted": 21424, + "impactful": 41631, + "impacting": 29359, + "impacts": 15069, + "impair": 36451, + "impaired": 28028, + "impairment": 44501, + "impala": 36641, + "impe": 23612, + "impeach": 16874, + "impeach": 43497, + "impeachment": 32979, + "impeachtrump": 38006, + "impecc": 34511, + "impeccable": 40111, + "impending": 34486, + "imper": 7727, + "imperative": 39833, + "imperfect": 46034, + "imperi": 30911, + "imperial": 32425, + "imperial": 12361, + "imperialism": 48855, + "imperson": 25551, + "implant": 33106, + "implants": 32202, + "imple": 7423, + "implement": 17966, + "implementation": 15102, + "implemented": 24315, + "implementing": 22862, + "implic": 15269, + "implications": 19229, + "implo": 40337, + "impo": 45704, + "import": 2336, + "import": 16294, + "importance": 6821, + "important": 2829, + "importantly": 21580, + "imported": 28798, + "imports": 25286, + "impose": 35879, + "imposed": 25871, + "imposing": 42289, + "impossible": 9815, + "impre": 3763, + "impress": 20015, + "impressed": 9689, + "impression": 14468, + "impressionism": 36114, + "impressionist": 44904, + "impressions": 22276, + "impressive": 6634, + "imprint": 43863, + 
"imprison": 22141, + "imprisoned": 32999, + "imprisonment": 39024, + "impro": 2531, + "impromp": 28100, + "impromptu": 28611, + "improv": 22868, + "improve": 4971, + "improved": 9446, + "improvement": 10790, + "improvements": 16320, + "improves": 18035, + "improving": 10381, + "improvis": 32343, + "improvised": 40886, + "impulse": 29683, + "impy": 42690, + "imran": 19647, + "imran": 19212, + "imrankhan": 25956, + "imrankhanpti": 26688, + "ims": 17800, + "imsa": 37262, + "imv": 35731, + "imvkohli": 37136, + "imwith": 26822, + "imwithher": 32651, + "in": 512, + "in": 530, + "ina": 18026, + "ina": 1366, + "inability": 47517, + "inaccurate": 49192, + "inaction": 41916, + "inactive": 49274, + "inadequate": 43403, + "inak": 46549, + "inal": 19178, + "inals": 26438, + "inan": 26204, + "inappropriate": 26722, + "inari": 48620, + "inary": 11337, + "inas": 36731, + "inas": 12362, + "inated": 38530, + "ination": 4706, + "inau": 10832, + "inaugu": 11309, + "inaugur": 11448, + "inaugural": 11340, + "inaugurated": 29011, + "inauguration": 16805, + "inbound": 24420, + "inbox": 18683, + "inc": 14570, + "inc": 4438, + "incan": 45964, + "incar": 18070, + "incarcer": 26334, + "incarcerated": 49178, + "incarceration": 39887, + "incase": 30463, + "ince": 44303, + "incen": 13259, + "incense": 35059, + "incentive": 29024, + "incentives": 29813, + "inception": 36653, + "inch": 6523, + "incheon": 30645, + "inches": 10809, + "inci": 5747, + "incidence": 43371, + "incident": 10103, + "incidents": 22120, + "incindia": 26161, + "inciner": 46434, + "incl": 27857, + "incl": 13338, + "inclined": 45470, + "inclu": 1738, + "include": 5942, + "included": 7414, + "includes": 6197, + "including": 2814, + "inclusion": 12079, + "inclusive": 13393, + "income": 8044, + "incoming": 15416, + "incomparable": 36027, + "incompetent": 45069, + "incomplete": 34040, + "incon": 42372, + "inconvenience": 40563, + "incorpor": 19335, + "incorporate": 34168, + "incorporated": 29494, + "incorporating": 40303, + "incorrect": 31872, + "incre": 1870, + "increase": 5230, + "increased": 9156, + "increases": 13797, + "increasing": 10270, + "increasingly": 16106, + "incredi": 2883, + "incredible": 22128, + "incredible": 3457, + "incredibleindia": 24680, + "incredibles": 48641, + "incredibly": 9513, + "incu": 38830, + "incub": 24587, + "incubator": 35736, + "incumb": 32246, + "incumbent": 38038, + "incur": 42356, + "ind": 5386, + "ind": 4655, + "inda": 15710, + "inde": 2645, + "indeed": 10031, + "indefin": 29501, + "indefinitely": 43750, + "independ": 4147, + "independence": 23117, + "independence": 7955, + "independenceday": 25971, + "independent": 33844, + "independent": 7088, + "independently": 39831, + "inder": 29225, + "index": 35209, + "index": 9458, + "indhoven": 44229, + "indi": 1098, + "indi": 46536, + "india": 27067, + "india": 1762, + "indian": 7685, + "indian": 3606, + "indiana": 8615, + "indianapolis": 17196, + "indianfootball": 45979, + "indians": 10271, + "indic": 7136, + "indicate": 26679, + "indicated": 39416, + "indicates": 29412, + "indication": 38539, + "indicator": 24776, + "indicators": 30054, + "indicted": 34992, + "indictment": 42278, + "indie": 5260, + "indie": 9383, + "indiedev": 10863, + "indiefilm": 22588, + "indiegame": 17969, + "indiegamedev": 40466, + "indiegames": 35864, + "indiegogo": 38057, + "indies": 23618, + "indiffe": 41372, + "indigen": 8348, + "indigenous": 9303, + "indigo": 21002, + "indira": 43887, + "indirec": 26398, + "indirect": 35416, + "indivi": 5649, + "individu": 9574, + "individual": 8512, + 
"individually": 33782, + "individuals": 11990, + "indo": 26303, + "indo": 18297, + "indom": 42926, + "indone": 6180, + "indonesia": 7229, + "indonesian": 19593, + "indoor": 44478, + "indoor": 9546, + "indoors": 22973, + "indore": 46143, + "indu": 2298, + "induc": 7973, + "induced": 24103, + "inducted": 20596, + "inductee": 39558, + "inductees": 44796, + "induction": 18338, + "indul": 19402, + "indulg": 28388, + "indulge": 24851, + "indulgence": 40856, + "indulgent": 49147, + "industri": 5082, + "industrial": 30853, + "industrial": 7520, + "industries": 11700, + "industry": 47407, + "industry": 3318, + "indv": 16942, + "indy": 9821, + "indy": 10098, + "indycar": 20484, + "indyref": 22569, + "ine": 855, + "ine": 715, + "ineau": 38122, + "inec": 45214, + "ined": 2038, + "inee": 43252, + "inee": 7986, + "inees": 13056, + "ineffe": 47202, + "inely": 18234, + "inem": 48876, + "inema": 29232, + "inen": 44365, + "inequalities": 45507, + "inequality": 17372, + "iner": 17438, + "iner": 5155, + "iners": 41863, + "ines": 2137, + "inese": 35966, + "iness": 1463, + "inet": 8121, + "inette": 38911, + "inev": 19527, + "inevit": 45871, + "inevitable": 25004, + "inews": 24300, + "inexpensive": 38614, + "iney": 30254, + "inez": 12700, + "inf": 1529, + "inf": 35241, + "infamous": 18688, + "infan": 17219, + "infant": 19192, + "infantry": 21655, + "infants": 34726, + "infe": 7164, + "infec": 26088, + "infected": 26136, + "infection": 14774, + "infections": 22227, + "infectious": 29157, + "infeld": 25035, + "infer": 16258, + "inferno": 31290, + "infertility": 40701, + "infield": 48933, + "infiltr": 28683, + "infin": 6246, + "infinite": 12748, + "infiniti": 34644, + "infinity": 34863, + "infinity": 12895, + "infl": 7627, + "inflam": 16080, + "inflammation": 24893, + "inflammatory": 26831, + "inflatable": 30135, + "inflation": 17497, + "inflicted": 48188, + "influ": 4835, + "influen": 13229, + "influence": 9199, + "influenced": 21183, + "influencer": 25013, + "influencers": 29891, + "influences": 24926, + "influencing": 45126, + "influential": 17553, + "influenza": 39897, + "info": 5680, + "info": 2222, + "infographic": 10076, + "infographics": 33172, + "infor": 31773, + "inform": 10241, + "inform": 19449, + "informal": 25705, + "informat": 29625, + "informatics": 35685, + "information": 3204, + "informative": 19364, + "informed": 13876, + "informing": 45388, + "informs": 48440, + "infosec": 17863, + "infr": 29718, + "infra": 7312, + "infra": 45877, + "infrared": 22867, + "infrastructure": 9034, + "infringe": 44882, + "infringement": 48712, + "infront": 37668, + "infu": 15048, + "infuri": 48461, + "infused": 21461, + "infusion": 43464, + "ing": 653, + "ing": 519, + "inga": 15233, + "ingco": 40444, + "ingday": 16561, + "ingdon": 38731, + "inge": 11790, + "inge": 7071, + "inged": 30046, + "ingen": 19088, + "ingeni": 36884, + "inger": 33883, + "inger": 3541, + "ingfor": 33430, + "ingh": 9170, + "ingh": 30495, + "ingham": 24497, + "ingham": 4291, + "inghamshire": 39289, + "inghour": 42728, + "inging": 4066, + "ingl": 45662, + "ingle": 22228, + "ingle": 17005, + "ingles": 24490, + "ingley": 44428, + "inglis": 46327, + "ingly": 4796, + "ingnow": 34766, + "ingo": 30175, + "ingo": 9012, + "ingra": 45165, + "ingrad": 44124, + "ingram": 26998, + "ingredi": 9272, + "ingredient": 19799, + "ingredients": 11788, + "ingrid": 33496, + "ings": 895, + "ingthe": 20170, + "ingtips": 39373, + "ington": 11846, + "ington": 2156, + "ingu": 8714, + "ingual": 22795, + "ingue": 36838, + "ingui": 12788, + "inguish": 36146, + "inha": 32612, + 
"inhabit": 36189, + "inhabitants": 44968, + "inhal": 30786, + "inhe": 32617, + "inher": 24611, + "inherent": 47327, + "inherit": 34322, + "inheritance": 39341, + "inherited": 39111, + "inhi": 25557, + "inhibit": 32196, + "inho": 12984, + "ini": 6154, + "ini": 3581, + "inian": 36638, + "inim": 38717, + "inindia": 34021, + "ining": 1389, + "inist": 30976, + "init": 42670, + "initi": 4580, + "initial": 13980, + "initially": 28123, + "initials": 48794, + "initiated": 27756, + "initiation": 41009, + "initiative": 8152, + "initiatives": 16549, + "inity": 22126, + "inj": 5112, + "injec": 13688, + "injection": 21438, + "inju": 5006, + "injured": 7505, + "injuries": 9481, + "injury": 6223, + "injustice": 20541, + "ink": 4547, + "ink": 967, + "inka": 40685, + "inked": 29356, + "inki": 46176, + "inkigayo": 47882, + "inking": 37586, + "inks": 20966, + "inktober": 9387, + "inland": 21943, + "inlet": 35161, + "inline": 45004, + "inlove": 28415, + "inmate": 32341, + "inmates": 28216, + "inmy": 42657, + "inn": 27260, + "inn": 5569, + "inna": 35088, + "inner": 24512, + "inner": 6955, + "inning": 4415, + "innings": 11580, + "innis": 44059, + "inno": 7961, + "innocence": 26383, + "innocent": 11241, + "innov": 2890, + "innovate": 24549, + "innovation": 33063, + "innovation": 4272, + "innovations": 18817, + "innovative": 8494, + "innovator": 34735, + "innovators": 27834, + "ino": 4211, + "ino": 2691, + "inoa": 25649, + "inos": 21828, + "inous": 47801, + "inox": 22698, + "input": 16952, + "inputs": 48763, + "inqu": 10628, + "inqui": 18527, + "inquirer": 45172, + "inquiries": 29469, + "inquiry": 15865, + "inquis": 31171, + "inr": 36325, + "ins": 12786, + "ins": 1041, + "insan": 7875, + "insane": 10260, + "insanely": 27846, + "insanity": 26645, + "inscribed": 49168, + "inscription": 41127, + "insec": 15744, + "insect": 21297, + "insects": 18714, + "insecure": 35112, + "insecurity": 36964, + "inser": 13830, + "insert": 18807, + "insi": 3453, + "inside": 19141, + "inside": 2912, + "insider": 13300, + "insiders": 32171, + "insig": 40503, + "insight": 8795, + "insightful": 20354, + "insights": 8729, + "insignia": 48864, + "insist": 35504, + "insisted": 40423, + "insists": 27255, + "inski": 32630, + "insky": 24607, + "insol": 42366, + "insom": 21755, + "insomni": 42040, + "insomnia": 30598, + "inson": 21007, + "insp": 1597, + "inspec": 7915, + "inspect": 40815, + "inspecting": 40565, + "inspection": 15142, + "inspections": 39513, + "inspector": 20514, + "inspir": 2573, + "inspiration": 4195, + "inspirational": 41936, + "inspirational": 9855, + "inspirations": 35093, + "inspire": 27901, + "inspire": 8583, + "inspired": 39849, + "inspired": 3516, + "inspires": 17245, + "inspiring": 41847, + "inspiring": 5705, + "inspo": 26897, + "inst": 1264, + "inst": 1581, + "insta": 22411, + "insta": 11694, + "instability": 41377, + "instac": 46678, + "instaf": 33800, + "instag": 14612, + "instagood": 23718, + "instagram": 27910, + "instagram": 2659, + "instal": 38805, + "install": 6940, + "install": 11168, + "installation": 9358, + "installations": 27909, + "installed": 8807, + "installing": 18301, + "installment": 25315, + "installs": 45568, + "instalment": 47766, + "instance": 34572, + "instant": 38810, + "instant": 10635, + "instantly": 17703, + "instap": 23758, + "instapic": 34378, + "instaweather": 43078, + "instaweatherpro": 43150, + "inste": 3571, + "instead": 4191, + "instein": 13421, + "instem": 27030, + "instin": 23382, + "instinct": 30544, + "institu": 4257, + "institute": 5861, + "institutes": 43674, + "institution": 
18823, + "institutional": 27442, + "institutions": 15207, + "instore": 41679, + "instru": 4544, + "instruc": 19648, + "instruction": 19407, + "instructional": 31022, + "instructions": 17040, + "instructor": 16087, + "instructors": 31998, + "instrument": 42196, + "instrument": 15806, + "instrumental": 23041, + "instruments": 14793, + "instyle": 41321, + "insu": 8805, + "insul": 9615, + "insulated": 42051, + "insulation": 28194, + "insulin": 29311, + "insult": 26673, + "insulting": 39646, + "insults": 40451, + "insur": 5024, + "insurance": 5870, + "insured": 31321, + "insurers": 43142, + "insurtech": 28716, + "int": 1828, + "int": 1207, + "inta": 38314, + "intact": 26870, + "intake": 19539, + "intan": 47695, + "inte": 1598, + "inte": 41900, + "intech": 26504, + "inted": 6147, + "integr": 5151, + "integral": 27018, + "integrate": 25735, + "integrated": 12797, + "integrating": 31555, + "integration": 12583, + "integrity": 14791, + "intel": 11778, + "intel": 11426, + "intellec": 13281, + "intellect": 47828, + "intellectu": 31966, + "intellectual": 18069, + "intelli": 5324, + "intellig": 5632, + "intelligence": 6846, + "intelligent": 14063, + "inten": 2967, + "intend": 36674, + "intended": 16812, + "intense": 10258, + "intensi": 22928, + "intensity": 19956, + "intensive": 21049, + "intent": 18881, + "intention": 26786, + "intentional": 29536, + "intentionally": 31215, + "intentions": 26710, + "inter": 1006, + "inter": 10093, + "interact": 21736, + "interacting": 35045, + "interaction": 17650, + "interactions": 22162, + "interactive": 9456, + "intercep": 23676, + "interception": 48762, + "interceptions": 45313, + "interchange": 34222, + "intercontinental": 31983, + "interdisciplinary": 38132, + "intere": 2008, + "interest": 5095, + "interested": 4620, + "interesting": 3628, + "interests": 16425, + "interface": 18753, + "interfaith": 38399, + "interference": 29099, + "interim": 19509, + "interior": 10700, + "interior": 7305, + "interiordesign": 12902, + "interiors": 14836, + "intermedi": 20246, + "intermediate": 24304, + "intermission": 44805, + "intermitt": 44946, + "intern": 9976, + "intern": 14068, + "internal": 11285, + "internally": 41134, + "internation": 42534, + "international": 8566, + "international": 2436, + "internationaldayof": 41518, + "internationally": 24059, + "internationalwomensday": 17682, + "interne": 32713, + "internet": 30180, + "internet": 4757, + "internetof": 44449, + "internetofthings": 45925, + "interns": 19902, + "internship": 16661, + "internships": 39410, + "interoper": 45754, + "interpre": 11162, + "interpret": 49154, + "interpret": 40459, + "interpretation": 20652, + "interpreted": 42157, + "interpreting": 46525, + "interro": 29548, + "interrup": 21609, + "interrupt": 48449, + "interrupted": 30288, + "intersec": 45246, + "intersection": 19210, + "interstate": 21963, + "interstellar": 41506, + "interval": 36032, + "intervals": 44884, + "interven": 18245, + "intervention": 16804, + "interventions": 28848, + "interview": 2885, + "interviewed": 11688, + "interviewing": 16399, + "interviews": 9910, + "intestin": 37938, + "intestinal": 38896, + "inthe": 7486, + "inti": 14459, + "intim": 38832, + "intimacy": 46430, + "intimate": 16382, + "intimid": 24041, + "intimidating": 44405, + "intimidation": 49258, + "inting": 15571, + "intl": 38186, + "intl": 14224, + "intment": 9020, + "intments": 21420, + "into": 35235, + "into": 1095, + "intoler": 28534, + "intolerance": 37808, + "intothe": 38511, + "intra": 20922, + "intrac": 46195, + "intram": 40956, + "intre": 29397, + 
"intrepid": 39127, + "intri": 15421, + "intric": 23763, + "intricate": 29616, + "intrigu": 18856, + "intrigue": 45140, + "intrigued": 40034, + "intriguing": 24334, + "intrin": 45181, + "intro": 2999, + "intro": 13224, + "introduc": 3621, + "introduce": 9813, + "introduced": 10446, + "introduces": 12933, + "introducing": 6256, + "introduction": 11812, + "introductory": 38121, + "intru": 22949, + "ints": 2514, + "intu": 17225, + "intuition": 40897, + "intuitive": 35224, + "inu": 21131, + "inuit": 41250, + "inus": 45857, + "inv": 2279, + "inv": 43786, + "inva": 10084, + "invade": 34609, + "invaded": 32596, + "invaders": 35188, + "invading": 40101, + "invali": 31592, + "invalid": 46998, + "invaluable": 33976, + "invasi": 38100, + "invasion": 13378, + "invasive": 19554, + "inve": 2024, + "inven": 26233, + "invent": 11665, + "invent": 23558, + "invented": 14100, + "invention": 23607, + "inventions": 44914, + "inventor": 22836, + "inventory": 19444, + "inver": 12061, + "inverness": 33080, + "inverte": 46397, + "inverted": 40709, + "invest": 4180, + "invest": 9716, + "invested": 22536, + "investig": 4626, + "investigate": 15703, + "investigated": 29180, + "investigates": 29621, + "investigating": 13713, + "investigation": 8194, + "investigations": 24020, + "investigative": 30233, + "investigator": 30528, + "investigators": 24121, + "investin": 40195, + "investing": 10554, + "investment": 5605, + "investments": 14675, + "investor": 15490, + "investors": 10486, + "invests": 38378, + "invic": 25253, + "invigor": 48722, + "invin": 30252, + "invincible": 38052, + "invisible": 16093, + "invit": 12454, + "invitation": 15032, + "invitational": 14511, + "invitations": 40120, + "invite": 8109, + "invited": 7731, + "invites": 16034, + "inviting": 14349, + "invo": 29417, + "invol": 4000, + "involve": 26325, + "involved": 5320, + "involvement": 19502, + "involves": 22652, + "involving": 14786, + "inwx": 35674, + "iny": 23257, + "inyour": 47954, + "io": 3167, + "io": 3752, + "ioc": 43018, + "iom": 33000, + "iom": 31135, + "ion": 14871, + "ion": 3668, + "ions": 26289, + "ior": 7354, + "ior": 2498, + "iority": 46016, + "iors": 6427, + "ios": 6614, + "iot": 32694, + "iot": 6627, + "iota": 37294, + "ious": 6994, + "iously": 38233, + "iow": 7439, + "iowa": 38847, + "iowa": 8290, + "ip": 1719, + "ip": 8600, + "ipa": 11199, + "ipad": 39067, + "ipad": 7491, + "ipads": 35281, + "ipc": 41981, + "iphone": 26030, + "iphone": 4314, + "iphones": 37561, + "ipl": 13440, + "ipment": 37824, + "ipo": 40218, + "ipo": 24090, + "ipod": 17889, + "ipp": 31706, + "ips": 26910, + "ipsw": 22221, + "ipswich": 24494, + "iq": 15554, + "iq": 19996, + "iqbal": 33553, + "ir": 582, + "ir": 742, + "ira": 4923, + "ira": 5371, + "irah": 35724, + "iran": 19273, + "iran": 5075, + "irandeal": 46533, + "irani": 37984, + "iranian": 14158, + "iraq": 8543, + "iraqi": 18617, + "irc": 41527, + "ird": 2770, + "ire": 3013, + "ire": 1454, + "ired": 32728, + "ired": 2995, + "ireland": 32806, + "ireland": 4157, + "irene": 21600, + "ires": 12435, + "irez": 21581, + "irgc": 47942, + "iri": 2155, + "iri": 13880, + "irical": 33366, + "irie": 42979, + "irina": 46664, + "iring": 10169, + "iris": 16437, + "irish": 9386, + "irish": 4889, + "irl": 34494, + "irl": 8570, + "irling": 26493, + "irls": 24344, + "irma": 22406, + "irn": 42603, + "iro": 23209, + "iro": 7280, + "iron": 7699, + "iron": 5391, + "ironic": 24518, + "ironically": 36779, + "ironing": 46655, + "ironman": 20330, + "irons": 30032, + "irony": 20681, + "irport": 27769, + "irr": 24641, + "irrational": 
47413, + "irregular": 38692, + "irrelevant": 34677, + "irresi": 31200, + "irresistible": 35252, + "irresponsible": 44714, + "irri": 21484, + "irrigation": 23761, + "irrit": 24218, + "irs": 6086, + "irst": 32701, + "iru": 48206, + "irvin": 47053, + "irvine": 24201, + "irving": 19738, + "irwin": 23750, + "iry": 7239, + "is": 595, + "is": 533, + "isa": 11034, + "isa": 6536, + "isaac": 37544, + "isaac": 13659, + "isab": 13357, + "isabel": 27466, + "isabella": 26192, + "isabelle": 31072, + "isable": 46631, + "isai": 15365, + "isaiah": 17952, + "isak": 40619, + "isance": 46893, + "isation": 7194, + "isback": 43811, + "isc": 39316, + "isch": 47888, + "isco": 5736, + "iscoming": 26458, + "isd": 46816, + "isd": 12002, + "ise": 7669, + "ise": 1479, + "ised": 2861, + "iselle": 48491, + "iser": 23080, + "iser": 5626, + "isers": 34879, + "ises": 5153, + "isf": 44036, + "isgreat": 34595, + "ish": 6844, + "ish": 1061, + "isha": 28050, + "ishable": 37949, + "ished": 35341, + "ishere": 46053, + "ishi": 26224, + "ishq": 27996, + "ishqba": 32503, + "ishqbaaaz": 36591, + "isi": 7233, + "isi": 17880, + "isil": 34636, + "isin": 37676, + "ising": 3426, + "isis": 7531, + "isk": 30171, + "isl": 31368, + "isla": 22807, + "islam": 6003, + "islam": 8770, + "islamabad": 19959, + "islamic": 31627, + "islamic": 9552, + "islamist": 38798, + "islamophobia": 43459, + "island": 13408, + "island": 2619, + "islander": 45651, + "islanders": 27804, + "islands": 7145, + "islay": 49279, + "isle": 19082, + "isle": 11849, + "isleof": 24718, + "isles": 21816, + "islife": 26433, + "islington": 34945, + "ism": 47730, + "ism": 1935, + "isma": 43937, + "ismail": 36140, + "isme": 43570, + "ismo": 41926, + "isms": 18700, + "isn": 2923, + "isner": 48246, + "isnow": 43694, + "isnt": 19416, + "iso": 2462, + "iso": 12263, + "isol": 11414, + "isolated": 19044, + "isolation": 26400, + "ison": 12949, + "ison": 4553, + "isons": 33318, + "isoo": 35857, + "isp": 31397, + "isp": 39041, + "isra": 3591, + "israel": 20837, + "israel": 4779, + "israeli": 8994, + "israelis": 45713, + "isreal": 47147, + "isro": 44841, + "iss": 11738, + "iss": 4950, + "issa": 38579, + "issa": 7560, + "issan": 49358, + "issance": 40828, + "issant": 38828, + "isse": 18986, + "ission": 37946, + "issu": 2049, + "issue": 3202, + "issued": 9246, + "issues": 4082, + "issuing": 37226, + "ist": 9751, + "ist": 2304, + "istanbul": 12258, + "istandwith": 33820, + "iste": 32563, + "ister": 14555, + "isthe": 46748, + "istic": 29556, + "ists": 8426, + "isu": 17030, + "isu": 23328, + "it": 529, + "it": 585, + "ita": 36920, + "ita": 2864, + "itable": 8915, + "ital": 2306, + "ital": 1660, + "itali": 11644, + "italia": 11025, + "italian": 20264, + "italian": 5175, + "italians": 44744, + "italk": 32894, + "italy": 4052, + "itan": 18383, + "itans": 40711, + "itar": 47161, + "itarian": 11599, + "itary": 17604, + "itas": 31634, + "itas": 13436, + "itate": 42457, + "itated": 36744, + "itation": 5070, + "itative": 22892, + "itc": 36449, + "itch": 2387, + "itch": 8147, + "itchen": 32664, + "itchy": 41980, + "ite": 2732, + "ite": 802, + "iteam": 37828, + "itec": 3099, + "itec": 43936, + "itech": 44215, + "itech": 23040, + "ited": 8603, + "ited": 1108, + "itel": 44638, + "itely": 4605, + "item": 8532, + "items": 6207, + "iter": 7938, + "iter": 19773, + "iteracy": 39634, + "iterate": 43106, + "iteration": 38790, + "ites": 2454, + "itez": 42131, + "itf": 35436, + "itfc": 36519, + "ith": 6133, + "ith": 1757, + "ithaca": 46257, + "iti": 760, + "iti": 6165, + "itia": 22634, + "itian": 23365, + "itic": 
11950, + "itical": 48767, + "itics": 33967, + "ities": 41423, + "ities": 1480, + "itim": 15676, + "itiner": 32803, + "itinerary": 41564, + "iting": 1257, + "ition": 25263, + "ition": 1104, + "itions": 5540, + "itious": 13329, + "itis": 33539, + "itis": 8388, + "itive": 3067, + "itly": 42240, + "ito": 22167, + "ito": 4661, + "iton": 21119, + "itor": 47267, + "itor": 4584, + "itors": 22005, + "itos": 24560, + "its": 7140, + "its": 902, + "itsa": 45032, + "itself": 7290, + "itsme": 41125, + "itss": 47040, + "itt": 1031, + "itt": 11228, + "itta": 21233, + "itte": 31962, + "itted": 24429, + "itten": 30014, + "itten": 4343, + "itter": 11456, + "itters": 13082, + "itti": 28629, + "ittin": 25646, + "itting": 3147, + "ittle": 24208, + "ittle": 21366, + "ittles": 38989, + "itton": 25707, + "itty": 35096, + "itu": 1668, + "itu": 32128, + "itude": 43382, + "itude": 5012, + "itudes": 20459, + "itunes": 7007, + "itup": 35838, + "iture": 25547, + "itus": 24364, + "itutes": 32883, + "itv": 20159, + "itv": 12805, + "ity": 2480, + "ity": 696, + "itya": 32055, + "itz": 14544, + "itz": 7807, + "iu": 14292, + "iu": 15575, + "ium": 10762, + "ius": 6740, + "iv": 6775, + "iv": 9315, + "iva": 42463, + "ivan": 15544, + "ivan": 15689, + "ivanka": 37914, + "ive": 26885, + "ive": 8653, + "ived": 15654, + "iver": 36849, + "iver": 44254, + "ives": 27333, + "ivf": 39159, + "iving": 45136, + "ivory": 16776, + "ivote": 45835, + "ivy": 36939, + "ivy": 16045, + "iw": 13058, + "iw": 46604, + "iwant": 42747, + "iwd": 16815, + "iwm": 44237, + "ix": 13272, + "ix": 8756, + "iy": 13704, + "iya": 18595, + "iyaki": 48395, + "iz": 2845, + "iz": 8407, + "iza": 37704, + "ization": 10847, + "ize": 10885, + "ized": 7690, + "izen": 34776, + "izer": 23895, + "izes": 45434, + "izing": 17354, + "izo": 46910, + "izz": 31779, + "izz": 46128, + "izzy": 28861, + "j": 73, + "j": 329, + "ja": 1586, + "ja": 2641, + "jaan": 25052, + "jab": 8059, + "jab": 9439, + "jac": 2293, + "jac": 30198, + "jace": 43286, + "jack": 2679, + "jack": 3267, + "jacked": 27923, + "jacket": 6164, + "jackets": 14745, + "jacki": 47418, + "jackie": 28023, + "jackie": 11716, + "jacking": 40929, + "jackman": 35723, + "jackpot": 23926, + "jacks": 19649, + "jackson": 12321, + "jackson": 4363, + "jacksonville": 19263, + "jaco": 6840, + "jacob": 14385, + "jacob": 9222, + "jacobs": 17482, + "jacobson": 46826, + "jacqu": 14495, + "jacqueline": 22843, + "jacques": 17799, + "jad": 12976, + "jad": 38691, + "jada": 37416, + "jade": 25123, + "jade": 14513, + "jaden": 37174, + "jadine": 37445, + "jae": 16869, + "jae": 15765, + "jaejoong": 43610, + "jaf": 19362, + "jag": 7984, + "jag": 36236, + "jagan": 48530, + "jagger": 30835, + "jags": 31086, + "jagu": 10096, + "jaguar": 44777, + "jaguar": 14757, + "jaguars": 21854, + "jah": 20067, + "jah": 11084, + "jahan": 44404, + "jahan": 47827, + "jai": 10542, + "jai": 13819, + "jail": 18574, + "jail": 9332, + "jailbreak": 45990, + "jailed": 19456, + "jails": 47833, + "jaime": 24716, + "jain": 21999, + "jaipur": 23593, + "jais": 48607, + "jait": 28910, + "jaitley": 32776, + "jak": 9225, + "jak": 30589, + "jakarta": 15471, + "jake": 13140, + "jake": 7419, + "jakob": 47358, + "jal": 8380, + "jal": 26773, + "jalan": 27270, + "jalap": 49081, + "jalape": 34263, + "jalapeño": 43017, + "jalen": 33548, + "jam": 1434, + "jam": 5201, + "jama": 8977, + "jama": 35366, + "jamaica": 13019, + "jamaican": 25144, + "jamal": 26108, + "jambo": 35599, + "jamboree": 38506, + "jame": 12341, + "james": 6963, + "james": 2392, + "jamesbond": 44704, + "jamesc": 47004, + 
"jameson": 31731, + "jami": 15092, + "jamie": 16454, + "jamie": 8078, + "jamiedor": 34310, + "jamiedornan": 34896, + "jammed": 35590, + "jammin": 35223, + "jamming": 25862, + "jammu": 25926, + "jams": 20243, + "jan": 1891, + "jan": 3334, + "jana": 18182, + "jane": 12389, + "jane": 6736, + "janeiro": 31740, + "janet": 29665, + "janet": 15872, + "jang": 41526, + "jang": 22074, + "jani": 22606, + "janice": 36048, + "janine": 46896, + "janis": 44233, + "jann": 35377, + "jans": 22578, + "jansen": 45354, + "janu": 3623, + "january": 3697, + "jap": 2299, + "jap": 49062, + "japan": 4502, + "japan": 3400, + "japanese": 27211, + "japanese": 4925, + "japs": 42121, + "jar": 5120, + "jar": 10837, + "jard": 25778, + "jardin": 37371, + "jare": 17654, + "jared": 35597, + "jared": 12571, + "jaredle": 36739, + "jaredleto": 37106, + "jaro": 35505, + "jarpad": 44497, + "jarre": 23385, + "jarrett": 30531, + "jars": 27583, + "jarvis": 29286, + "jas": 4492, + "jas": 17559, + "jasmin": 42989, + "jasmin": 47700, + "jasmine": 17056, + "jason": 10009, + "jason": 5395, + "jasper": 19827, + "jat": 26106, + "jau": 26932, + "jauregui": 48175, + "jav": 6234, + "java": 12918, + "javascri": 16289, + "javascript": 16423, + "jave": 46218, + "javed": 42268, + "javelin": 41701, + "javi": 47627, + "javier": 23307, + "jaw": 14804, + "jaw": 17307, + "jawa": 44790, + "jaws": 25491, + "jax": 22348, + "jax": 12390, + "jay": 3427, + "jay": 4155, + "jaya": 21960, + "jayanti": 37732, + "jaye": 45703, + "jayne": 35228, + "jays": 12393, + "jaz": 3465, + "jaz": 32874, + "jazeera": 38260, + "jazz": 11488, + "jazz": 4528, + "jazzfest": 36683, + "jazzy": 28191, + "jb": 21915, + "jb": 13637, + "jc": 14991, + "jc": 11517, + "jd": 18289, + "jd": 14125, + "jdm": 42013, + "je": 1013, + "je": 8776, + "jeal": 9964, + "jealous": 11093, + "jealousy": 37654, + "jean": 13943, + "jean": 6473, + "jeanette": 48167, + "jeanne": 29201, + "jeans": 10157, + "jeb": 35101, + "jec": 1347, + "ject": 6070, + "jed": 12166, + "jed": 38748, + "jeddah": 40982, + "jedi": 16681, + "jee": 29250, + "jee": 14870, + "jeep": 16593, + "jeep": 11286, + "jeeplife": 43100, + "jeet": 45542, + "jeet": 30944, + "jef": 10276, + "jeff": 6245, + "jeff": 5550, + "jefferson": 44711, + "jefferson": 13976, + "jeffery": 41470, + "jeffree": 45994, + "jeffrey": 32886, + "jeffrey": 16027, + "jeho": 42437, + "jeky": 43893, + "jekyll": 49405, + "jel": 9794, + "jelena": 48218, + "jelly": 19110, + "jelly": 13762, + "jellyfish": 30988, + "jem": 46326, + "jem": 37530, + "jen": 2554, + "jen": 12997, + "jenkins": 16162, + "jenn": 33921, + "jenn": 29869, + "jenna": 17125, + "jenner": 14260, + "jenni": 6774, + "jennie": 28875, + "jennifer": 19786, + "jennifer": 8613, + "jennings": 21564, + "jenny": 20165, + "jenny": 13414, + "jens": 40806, + "jensen": 35558, + "jensen": 19004, + "jensenackles": 41011, + "jeon": 45200, + "jeon": 43337, + "jeong": 47146, + "jeong": 39264, + "jeopar": 22988, + "jeopardy": 29613, + "jer": 2310, + "jer": 35307, + "jere": 5614, + "jeremi": 22362, + "jeremiah": 27301, + "jeremy": 14656, + "jeremy": 8127, + "jeremycorbyn": 37484, + "jeric": 25084, + "jericho": 28892, + "jerk": 23917, + "jerky": 40079, + "jermaine": 40722, + "jerome": 19876, + "jerry": 18163, + "jerry": 9164, + "jersey": 21921, + "jersey": 4471, + "jerseys": 15518, + "jerus": 12257, + "jerusalem": 12557, + "jes": 7686, + "jes": 35826, + "jess": 5313, + "jess": 13758, + "jesse": 23112, + "jesse": 11770, + "jessi": 24373, + "jessic": 14881, + "jessica": 45421, + "jessica": 8178, + "jessie": 19424, + "jester": 
44225, + "jesu": 19777, + "jesuit": 33234, + "jesus": 4070, + "jet": 11515, + "jet": 6565, + "jetblue": 45021, + "jeter": 38450, + "jets": 38584, + "jets": 10025, + "jett": 44541, + "jetty": 46382, + "jew": 27450, + "jewel": 4880, + "jewel": 17591, + "jewell": 9777, + "jewellers": 46265, + "jewellery": 11192, + "jewelry": 28018, + "jewelry": 6039, + "jewels": 20205, + "jewish": 29594, + "jewish": 9104, + "jews": 14200, + "jf": 31130, + "jf": 33718, + "jfc": 43652, + "jfk": 18486, + "jg": 41986, + "jg": 35138, + "jh": 24858, + "jh": 21485, + "jha": 47012, + "jha": 38092, + "jhal": 45695, + "jhar": 31546, + "jharkhand": 39001, + "jhb": 34631, + "ji": 3252, + "ji": 2697, + "jia": 32907, + "jian": 33427, + "jiang": 43309, + "jiang": 25762, + "jic": 48350, + "jic": 40215, + "jid": 24403, + "jie": 40005, + "jig": 15136, + "jig": 47430, + "jigsaw": 32987, + "jiha": 23194, + "jihad": 29637, + "jihoon": 44765, + "jil": 36225, + "jill": 24136, + "jill": 15254, + "jillian": 37820, + "jim": 3190, + "jim": 4550, + "jima": 20679, + "jimcantore": 43950, + "jimenez": 35947, + "jimi": 30565, + "jimin": 16286, + "jimmie": 45679, + "jimmy": 12215, + "jimmy": 6817, + "jimmyfallon": 45265, + "jin": 7927, + "jin": 8485, + "jind": 40609, + "jing": 34933, + "jing": 28607, + "jingle": 28699, + "jinnah": 43141, + "jinping": 39308, + "jinx": 42977, + "jinyoung": 38051, + "jio": 40501, + "jis": 25988, + "jis": 23515, + "jisoo": 43070, + "jit": 11947, + "jit": 20308, + "jitsu": 24530, + "jiu": 43351, + "jiu": 44123, + "jj": 12502, + "jj": 12790, + "jk": 20189, + "jk": 9702, + "jkt": 21494, + "jl": 25027, + "jl": 22911, + "jlo": 31017, + "jm": 24044, + "jm": 18657, + "jn": 24576, + "jn": 21717, + "jnr": 37145, + "jnu": 47142, + "jo": 683, + "jo": 3804, + "joachim": 48979, + "joan": 28064, + "joan": 12710, + "joann": 35484, + "joanna": 25357, + "joanne": 43736, + "joanne": 25092, + "joao": 45666, + "joaqu": 25140, + "joaquin": 30745, + "job": 13114, + "job": 2075, + "jobs": 3735, + "jobsearch": 45459, + "joburg": 39343, + "jocel": 36879, + "jocelyn": 47259, + "jock": 34485, + "jockey": 20126, + "jodh": 48689, + "jodi": 36812, + "jodi": 26888, + "jodie": 33100, + "jody": 32959, + "joe": 9309, + "joe": 3305, + "joel": 19819, + "joel": 11429, + "joes": 34756, + "joey": 16281, + "joey": 10455, + "jog": 37967, + "jog": 31691, + "jogging": 37922, + "joh": 1201, + "johan": 17416, + "johan": 27789, + "johann": 31180, + "johanna": 41494, + "johannes": 37779, + "johannesburg": 28377, + "johansson": 41512, + "johar": 34871, + "john": 2004, + "john": 1742, + "johncena": 46820, + "johnnie": 47947, + "johnny": 14464, + "johnny": 6904, + "johns": 14515, + "johnson": 26036, + "johnson": 4010, + "johnston": 19791, + "johnstone": 40766, + "johor": 34750, + "join": 14737, + "join": 1384, + "joined": 4954, + "joining": 5118, + "joins": 5681, + "joint": 6640, + "jointhe": 30422, + "jointly": 37471, + "joints": 27204, + "jojo": 41484, + "jojo": 22075, + "joke": 7198, + "joker": 18200, + "jokers": 44101, + "jokes": 11336, + "joking": 26112, + "joko": 44975, + "jol": 9174, + "jol": 36470, + "jolie": 31633, + "jolla": 46109, + "jolly": 21516, + "jom": 32152, + "jon": 3026, + "jon": 6139, + "jona": 6629, + "jonah": 47934, + "jonah": 27556, + "jonas": 42373, + "jonas": 13650, + "jonathan": 19026, + "jonathan": 7762, + "jone": 33934, + "jones": 19091, + "jones": 3538, + "jong": 20214, + "jong": 14726, + "jonghyun": 29023, + "jongin": 36957, + "joni": 43177, + "jonny": 28454, + "jonny": 21895, + "joo": 25807, + "joo": 27680, + "joom": 47543, + 
"joon": 18547, + "joong": 26544, + "jop": 30486, + "joplin": 42688, + "jor": 2482, + "jor": 31595, + "jordan": 14644, + "jordan": 4388, + "jordani": 46898, + "jordi": 44795, + "jorge": 48761, + "jorge": 18225, + "jos": 20560, + "jos": 19661, + "jose": 4647, + "jose": 7075, + "josef": 36584, + "josel": 47800, + "joseph": 14163, + "joseph": 6478, + "josephine": 34866, + "josh": 9998, + "josh": 5679, + "joshi": 24786, + "joshu": 9112, + "joshua": 11852, + "josi": 33583, + "josie": 33167, + "joss": 42834, + "josé": 27922, + "jou": 19921, + "jou": 32029, + "jour": 2078, + "jour": 17142, + "journ": 4563, + "journal": 6626, + "journalism": 10123, + "journalist": 9914, + "journalists": 12249, + "journals": 24391, + "journe": 48833, + "journey": 32156, + "journey": 3749, + "journeys": 23329, + "journo": 37034, + "journos": 46437, + "jovi": 33866, + "joy": 6308, + "joy": 4273, + "joyce": 43753, + "joyce": 15275, + "joye": 34052, + "joyeux": 41876, + "joyful": 24139, + "joyous": 32245, + "joyride": 46949, + "joys": 22996, + "jp": 18249, + "jp": 10557, + "jpg": 36950, + "jpn": 36212, + "jr": 13973, + "jr": 3605, + "js": 46243, + "js": 8006, + "jst": 26523, + "jt": 39480, + "jt": 18119, + "ju": 669, + "ju": 9970, + "jual": 38720, + "juan": 17148, + "juan": 9274, + "juana": 9081, + "jubi": 15485, + "jubil": 47743, + "jubilee": 16907, + "juco": 31570, + "jud": 8363, + "juda": 32478, + "judah": 41066, + "judaism": 42217, + "judas": 39532, + "judd": 29770, + "judg": 20012, + "judge": 16824, + "judge": 5656, + "judged": 33453, + "judgement": 25246, + "judges": 12575, + "judging": 16570, + "judgment": 24191, + "judi": 42546, + "judice": 28032, + "judicial": 19579, + "judiciary": 24545, + "judith": 24047, + "judo": 27011, + "judy": 34663, + "judy": 16510, + "jug": 27619, + "jugg": 38628, + "juic": 38761, + "juice": 37954, + "juice": 6916, + "juices": 36757, + "juicy": 17623, + "juju": 43020, + "juke": 32519, + "jukebox": 36411, + "jul": 34662, + "jul": 15975, + "jule": 40819, + "jules": 21996, + "juli": 3614, + "juli": 49160, + "julia": 10207, + "julian": 25459, + "julian": 12643, + "juliana": 46059, + "julie": 22534, + "julie": 10505, + "julien": 32595, + "juliet": 20641, + "juliette": 44804, + "julio": 24888, + "julius": 20870, + "july": 2272, + "jum": 20791, + "jumbo": 24678, + "jume": 45989, + "jump": 5519, + "jump": 6423, + "jumped": 16901, + "jumper": 16558, + "jumpers": 36485, + "jumping": 11476, + "jumpman": 48803, + "jumps": 18911, + "jumpsuit": 31044, + "jun": 1637, + "jun": 7719, + "junction": 11320, + "june": 23188, + "june": 2345, + "jung": 13086, + "jung": 13031, + "jungkook": 20040, + "jungle": 42421, + "jungle": 10865, + "juni": 4029, + "junior": 21167, + "junior": 5027, + "juniors": 16811, + "juniper": 33829, + "junk": 16000, + "junkie": 27613, + "junkies": 41207, + "juno": 28845, + "junto": 34282, + "jupit": 15270, + "jupiter": 16212, + "jur": 15896, + "jura": 14715, + "jurassic": 28844, + "jurassic": 21255, + "jurgen": 39263, + "juris": 37010, + "jurisdic": 37714, + "jury": 12931, + "jus": 14999, + "just": 1770, + "just": 761, + "justi": 14700, + "justic": 30399, + "justice": 16904, + "justice": 3604, + "justicefor": 25812, + "justiceleague": 41929, + "justices": 44356, + "justified": 34546, + "justify": 28192, + "justin": 7537, + "justin": 4394, + "justinbieber": 12501, + "justine": 34418, + "justintrudeau": 32184, + "justsaying": 42922, + "juve": 47717, + "juve": 23092, + "juven": 12944, + "juvenile": 19333, + "juvent": 13908, + "juventus": 47378, + "juventus": 16208, + "jux": 33552, + 
"juxta": 34964, + "jv": 37932, + "jv": 11805, + "jw": 30221, + "jw": 24215, + "jy": 20979, + "jyo": 27378, + "jyoti": 48696, + "jä": 45381, + "k": 74, + "k": 330, + "ka": 1595, + "ka": 1525, + "kaa": 34496, + "kab": 6554, + "kab": 45134, + "kabaddi": 41749, + "kabir": 38619, + "kabo": 47974, + "kabul": 26160, + "kac": 21693, + "kach": 14341, + "kad": 10901, + "kade": 41130, + "kaduna": 38053, + "kae": 22542, + "kaeper": 30070, + "kaepernick": 30713, + "kaf": 19870, + "kag": 13666, + "kag": 31003, + "kah": 16068, + "kah": 15463, + "kahn": 35397, + "kai": 12752, + "kai": 9601, + "kaido": 40255, + "kail": 23623, + "kaine": 39028, + "kair": 33027, + "kaiser": 43685, + "kaiser": 29960, + "kait": 19326, + "kaitlyn": 34948, + "kaj": 44788, + "kaj": 40381, + "kak": 10401, + "kak": 40128, + "kaka": 47689, + "kaku": 30900, + "kal": 4187, + "kal": 18712, + "kala": 45453, + "kala": 33105, + "kalam": 40142, + "kalamaz": 42328, + "kalamazoo": 46264, + "kalb": 34483, + "kale": 17162, + "kale": 16625, + "kaleido": 41144, + "kali": 17844, + "kali": 26964, + "kalin": 42776, + "kalyan": 23825, + "kam": 4104, + "kam": 26011, + "kamal": 31371, + "kamal": 28619, + "kamala": 45003, + "kame": 45235, + "kamen": 40738, + "kami": 28707, + "kamloops": 36602, + "kamp": 35179, + "kamp": 29522, + "kampala": 37134, + "kan": 2532, + "kan": 8101, + "kana": 35178, + "kand": 17478, + "kane": 32218, + "kane": 9765, + "kang": 12226, + "kang": 20789, + "kangar": 20622, + "kangaroo": 25513, + "kani": 40907, + "kani": 41948, + "kann": 18533, + "kannada": 30053, + "kano": 28201, + "kans": 34012, + "kansas": 25507, + "kansas": 6539, + "kansascity": 46134, + "kant": 39923, + "kant": 47132, + "kanth": 24427, + "kanu": 44565, + "kany": 13590, + "kanye": 29680, + "kanye": 14965, + "kanyewest": 31943, + "kap": 6804, + "kap": 45279, + "kapam": 48561, + "kapil": 32337, + "kapil": 42709, + "kapilshar": 48978, + "kaplan": 37401, + "kapoor": 9117, + "kapp": 36717, + "kappa": 20239, + "kapur": 42371, + "kar": 1813, + "kar": 5933, + "kara": 12552, + "karab": 40916, + "karachi": 13671, + "karak": 40372, + "karan": 20077, + "karan": 20931, + "karanjohar": 47621, + "karao": 16262, + "karaoke": 16640, + "karate": 21211, + "kardashi": 13619, + "kardashian": 14578, + "kare": 14310, + "kare": 38354, + "kareem": 38885, + "kareena": 41569, + "karen": 17719, + "karen": 10349, + "kari": 15339, + "kari": 15161, + "karim": 33477, + "karin": 43917, + "karina": 40250, + "karl": 20967, + "karl": 13134, + "karla": 42309, + "karma": 17658, + "karnat": 13994, + "karnataka": 15515, + "karo": 45305, + "kart": 47841, + "kart": 21310, + "karthik": 41397, + "karti": 23053, + "kartikeyan": 32584, + "karting": 41655, + "kas": 6119, + "kas": 14372, + "kasa": 46111, + "kash": 6954, + "kash": 21371, + "kashi": 47945, + "kashmir": 20251, + "kashmir": 10783, + "kashmiri": 35331, + "kasi": 45870, + "kasi": 32819, + "kasich": 39666, + "kat": 2844, + "kat": 9341, + "kata": 14558, + "kate": 11620, + "kate": 6699, + "katelyn": 45963, + "kath": 7386, + "kath": 19745, + "katharine": 41473, + "katherine": 17687, + "kathle": 18721, + "kathleen": 21709, + "kathmandu": 34456, + "kathniel": 36159, + "kathr": 14905, + "kathryn": 33142, + "kathryn": 19999, + "kathy": 34775, + "kathy": 18795, + "kati": 6515, + "kati": 29928, + "katic": 48058, + "katie": 24117, + "katie": 9076, + "katniss": 47916, + "kato": 27573, + "katrin": 31282, + "katrina": 21397, + "katrinakaif": 45845, + "kats": 44213, + "katsu": 49296, + "katsu": 43712, + "katy": 17609, + "katy": 14435, + "katyperry": 28309, + 
"katz": 30790, + "kau": 9299, + "kau": 36895, + "kauai": 44050, + "kaufman": 37188, + "kaur": 30518, + "kav": 10228, + "kavan": 18576, + "kavanaugh": 20252, + "kaw": 10842, + "kaw": 42719, + "kawa": 33244, + "kawaii": 26891, + "kawasaki": 28227, + "kawhi": 41220, + "kay": 4673, + "kay": 9862, + "kaya": 22752, + "kayak": 27043, + "kayaking": 28977, + "kaye": 33003, + "kayla": 17139, + "kaylee": 47215, + "kayo": 37021, + "kaz": 8812, + "kaz": 39622, + "kazakh": 25451, + "kazakhstan": 26720, + "kazan": 47641, + "kb": 27381, + "kb": 19960, + "kbs": 27418, + "kc": 10869, + "kc": 8638, + "kca": 14347, + "kcon": 39970, + "kcr": 46181, + "kd": 21826, + "kd": 15597, + "kday": 31074, + "kdrama": 48628, + "ke": 643, + "ke": 618, + "kea": 47926, + "kean": 43288, + "keane": 28635, + "keanu": 40608, + "kear": 21562, + "kearney": 36435, + "keating": 40045, + "keaton": 29975, + "kebab": 36497, + "ked": 11730, + "ked": 1243, + "kee": 9724, + "kee": 6760, + "keef": 42323, + "keefe": 46965, + "keegan": 31122, + "keel": 48376, + "keen": 17714, + "keen": 13218, + "keenan": 36276, + "keep": 2924, + "keep": 1726, + "keeper": 7650, + "keepers": 16130, + "keepin": 41712, + "keeping": 38371, + "keeping": 4873, + "keepit": 28044, + "keeps": 6333, + "keer": 27412, + "keerth": 47500, + "keerthyofficial": 48185, + "kees": 10791, + "keg": 32785, + "keh": 41272, + "keh": 36983, + "kei": 18735, + "kei": 24835, + "keith": 18762, + "keith": 8252, + "kej": 15674, + "kejri": 16617, + "kejriwal": 17334, + "keke": 39195, + "kel": 2825, + "kel": 7553, + "kele": 41765, + "kell": 16082, + "kell": 40103, + "keller": 21407, + "kelley": 23776, + "kelli": 45852, + "kelli": 46190, + "kellie": 49224, + "kellogg": 44218, + "kelly": 13417, + "kelly": 5220, + "kelown": 31708, + "kelowna": 32963, + "kelsey": 42295, + "kelsey": 23018, + "kelvin": 32859, + "kem": 31013, + "kem": 17349, + "kemp": 18302, + "kemp": 25325, + "ken": 1838, + "ken": 1702, + "kend": 7497, + "kendal": 44836, + "kendall": 34607, + "kendall": 16238, + "kendra": 36074, + "kendrick": 41787, + "kendrick": 21953, + "kendricklamar": 47020, + "kenne": 6209, + "kennedy": 38631, + "kennedy": 9004, + "kennel": 39595, + "kenneth": 46900, + "kenneth": 17839, + "kenney": 41373, + "kenny": 20185, + "kenny": 9595, + "kens": 29765, + "kensing": 21505, + "kensington": 24988, + "kent": 13875, + "kent": 8214, + "kentu": 9045, + "kentucky": 32230, + "kentucky": 10014, + "keny": 17374, + "kenya": 6181, + "kenyan": 22624, + "kenyans": 36263, + "kenyatta": 31012, + "kenzie": 38087, + "keo": 43062, + "kept": 7737, + "ker": 2352, + "ker": 1485, + "keral": 35122, + "kerala": 11881, + "kered": 26690, + "kerel": 32232, + "keri": 43447, + "kermit": 40908, + "kern": 40150, + "kernel": 40684, + "kerr": 20491, + "kerri": 41849, + "kerry": 24795, + "kerry": 13097, + "kers": 30347, + "kers": 2880, + "kershaw": 40785, + "kerson": 42810, + "kerswednesday": 48152, + "kert": 47279, + "kes": 38398, + "kes": 1115, + "kesh": 19751, + "kesha": 36526, + "kest": 15080, + "ket": 2715, + "ket": 1236, + "ketball": 38240, + "ketch": 22590, + "ketch": 35371, + "ketchup": 26724, + "kete": 25404, + "keted": 41396, + "keting": 15951, + "keto": 27485, + "keto": 28754, + "kets": 1632, + "kett": 23124, + "kett": 10312, + "kettering": 43779, + "kettle": 41992, + "kettle": 24303, + "kev": 22758, + "kev": 29419, + "kevin": 9419, + "kevin": 4685, + "kew": 38014, + "kew": 31409, + "kex": 30251, + "key": 2891, + "key": 1458, + "keyan": 27617, + "keyboard": 13017, + "keyboards": 49237, + "keychain": 31050, + "keye": 40516, + 
"keye": 20635, + "keyes": 18336, + "keynes": 32462, + "keynote": 7556, + "keys": 48912, + "keys": 6355, + "keystone": 30688, + "keyword": 42284, + "keywords": 48122, + "kf": 33308, + "kf": 42119, + "kfc": 22032, + "kg": 36772, + "kg": 7817, + "kgs": 46629, + "kh": 2166, + "kh": 7452, + "kha": 7333, + "kha": 18929, + "khair": 43742, + "khaki": 41646, + "khal": 13070, + "khaled": 29343, + "khali": 11324, + "khalid": 27166, + "khalifa": 21389, + "khalil": 36229, + "kham": 24892, + "khan": 13318, + "khan": 3873, + "khand": 43384, + "khand": 31110, + "khanna": 29931, + "khar": 18340, + "khar": 28578, + "khart": 37458, + "khat": 43290, + "khe": 26360, + "kher": 43843, + "khi": 39062, + "khi": 42925, + "khil": 34101, + "khloe": 45312, + "kho": 14022, + "kho": 28774, + "khou": 30656, + "khs": 21239, + "khtar": 45593, + "khu": 14041, + "khur": 32083, + "khy": 40917, + "khz": 45604, + "ki": 848, + "ki": 2608, + "kia": 8712, + "kian": 43961, + "kian": 25708, + "kians": 44010, + "kib": 43108, + "kiba": 37207, + "kic": 24003, + "kic": 27633, + "kicchasu": 44665, + "kicchasudeep": 45560, + "kick": 4102, + "kick": 4289, + "kickass": 39299, + "kickboxing": 36041, + "kicked": 12479, + "kicker": 26338, + "kickin": 34597, + "kicking": 7802, + "kickoff": 10245, + "kicks": 6989, + "kickstart": 40780, + "kickstarter": 13228, + "kid": 3948, + "kid": 3551, + "kidd": 24082, + "kidding": 14535, + "kiddo": 36360, + "kiddos": 29205, + "kidlit": 39064, + "kidlit": 33515, + "kidlitart": 41600, + "kidman": 44931, + "kidnap": 45100, + "kidnapp": 16183, + "kidnapped": 24737, + "kidnapping": 32361, + "kidney": 37835, + "kidney": 14610, + "kids": 15561, + "kids": 1911, + "kidz": 41938, + "kie": 8544, + "kie": 3094, + "kiefer": 48026, + "kiel": 40940, + "kiel": 25509, + "kien": 28782, + "kier": 20403, + "kier": 35575, + "kieran": 29231, + "kies": 36601, + "kies": 4993, + "kiest": 29755, + "kiev": 24585, + "kiewicz": 47574, + "kigali": 40278, + "kii": 39340, + "kik": 36176, + "kiki": 23962, + "kiko": 40861, + "kil": 4912, + "kil": 39337, + "kildare": 45541, + "kili": 24386, + "kilig": 49172, + "kilimanjaro": 43470, + "kilkenny": 33805, + "kill": 6163, + "kill": 4367, + "killa": 41355, + "killarney": 48813, + "killed": 3733, + "killer": 28230, + "killer": 6613, + "killers": 17614, + "killin": 25903, + "killing": 37977, + "killing": 5923, + "killings": 24918, + "kills": 9795, + "kiln": 44150, + "kilo": 39281, + "kilom": 26285, + "kilometers": 39192, + "kilometres": 43278, + "kilt": 49319, + "kim": 4639, + "kim": 4606, + "kimber": 16796, + "kimberley": 39859, + "kimberly": 27465, + "kimchi": 41027, + "kimi": 31536, + "kimkardashian": 35400, + "kimmel": 27820, + "kimono": 40024, + "kin": 1442, + "kin": 2667, + "kina": 28518, + "kind": 7204, + "kind": 3044, + "kinda": 6612, + "kinder": 12711, + "kinder": 24159, + "kindergarten": 16749, + "kindle": 24704, + "kindle": 10746, + "kindleunlimited": 32164, + "kindly": 13952, + "kindness": 45112, + "kindness": 10614, + "kinds": 14879, + "kine": 17607, + "kineni": 49080, + "kinetic": 37699, + "king": 2365, + "king": 674, + "kingdom": 21870, + "kingdom": 7364, + "kingdomhearts": 48570, + "kingdoms": 43890, + "kingfisher": 34330, + "kingjames": 33153, + "kingly": 33642, + "kingof": 27878, + "kings": 18590, + "kings": 4232, + "kingsley": 41807, + "kingston": 40736, + "kingston": 15393, + "kini": 41644, + "kinky": 37006, + "kinney": 37233, + "kino": 39000, + "kins": 31060, + "kins": 4386, + "kinson": 12095, + "kio": 28210, + "kio": 39401, + "kiosk": 39146, + "kip": 27636, + "kip": 15986, + 
"kipp": 43329, + "kir": 3476, + "kir": 32949, + "kira": 33038, + "kiran": 43234, + "kiran": 36603, + "kirby": 17065, + "kiri": 34170, + "kiri": 45826, + "kirk": 10639, + "kirk": 11508, + "kirkland": 43061, + "kiro": 39749, + "kirstel": 46483, + "kirsten": 31813, + "kirsty": 37787, + "kis": 3199, + "kis": 22796, + "kish": 25662, + "kiss": 43757, + "kiss": 5946, + "kissed": 22561, + "kisses": 47876, + "kisses": 11220, + "kissing": 18637, + "kistan": 29580, + "kit": 4566, + "kit": 4274, + "kita": 29961, + "kitch": 3850, + "kitchen": 18131, + "kitchen": 4485, + "kitchener": 34428, + "kitchens": 28301, + "kite": 47777, + "kite": 19867, + "kites": 45829, + "kits": 13730, + "kitt": 10840, + "kitten": 13063, + "kittens": 17216, + "kitties": 36013, + "kitty": 25067, + "kitty": 8417, + "kiwan": 38709, + "kiwanis": 46513, + "kiwi": 22440, + "kiwis": 48108, + "kiya": 41610, + "kj": 27385, + "kj": 28238, + "kja": 41048, + "kjv": 37387, + "kk": 4390, + "kk": 10849, + "kka": 19002, + "kke": 44239, + "kker": 32399, + "kki": 44672, + "kkk": 20073, + "kkkk": 15834, + "kkkk": 47160, + "kkkkkkkk": 31042, + "kko": 43965, + "kkr": 40855, + "kl": 8498, + "kl": 14134, + "kla": 11249, + "klan": 46935, + "klar": 41374, + "klaus": 31788, + "kle": 7612, + "kle": 7432, + "klein": 33475, + "klein": 17579, + "kley": 18594, + "kli": 31640, + "klin": 44809, + "klin": 41647, + "kline": 47580, + "kling": 40270, + "klm": 38859, + "klo": 15296, + "klopp": 26446, + "kltu": 25978, + "klu": 21852, + "kly": 45090, + "km": 29954, + "km": 4590, + "kman": 33312, + "kms": 24996, + "kn": 4825, + "kn": 23693, + "knapp": 33945, + "kne": 6358, + "knee": 9897, + "knees": 19115, + "kner": 31578, + "knew": 5009, + "kni": 6312, + "knick": 33286, + "knicks": 17657, + "knife": 44176, + "knife": 8960, + "knigh": 43099, + "knight": 17949, + "knight": 7355, + "knights": 10385, + "knit": 18745, + "knit": 14313, + "knitted": 28151, + "knitting": 18863, + "knives": 20910, + "kno": 1482, + "kno": 25362, + "knob": 29736, + "knobs": 47504, + "knock": 14195, + "knock": 11583, + "knocked": 15325, + "knocking": 20380, + "knockout": 22602, + "knocks": 24296, + "knoll": 43882, + "knot": 18412, + "knots": 32428, + "know": 4179, + "know": 1038, + "knowing": 9267, + "knowledge": 27864, + "knowledge": 5510, + "knowledgeable": 43391, + "knowles": 32631, + "known": 3102, + "knows": 4309, + "knowyour": 30773, + "knox": 18630, + "knox": 21833, + "knoxville": 23232, + "knu": 14812, + "knuck": 21333, + "knuckle": 42023, + "knuckles": 40127, + "knw": 40803, + "ko": 1313, + "ko": 2448, + "koala": 36654, + "kobe": 42644, + "kobe": 14470, + "kobo": 42390, + "koch": 25331, + "kochi": 36710, + "kodak": 30425, + "kodi": 46611, + "kof": 17528, + "koff": 47303, + "kofi": 40400, + "koh": 13379, + "koh": 31216, + "kohl": 48479, + "kohli": 17549, + "koi": 28150, + "kojima": 46419, + "kok": 32045, + "kok": 11225, + "koko": 42426, + "koko": 40003, + "kol": 7142, + "kol": 31023, + "kolkata": 18011, + "kom": 6686, + "kom": 24181, + "kombat": 29670, + "kombucha": 48615, + "komo": 31820, + "kon": 5743, + "kon": 29519, + "kona": 30203, + "kong": 31784, + "kong": 6506, + "konstant": 46583, + "koo": 12225, + "koo": 40472, + "kook": 16003, + "kool": 36755, + "kool": 26444, + "kop": 16623, + "kop": 38999, + "kor": 6428, + "kor": 24175, + "kore": 3919, + "korea": 5915, + "korean": 31949, + "korean": 8034, + "kori": 42842, + "korn": 45412, + "korn": 31492, + "kors": 34535, + "kos": 47438, + "kos": 22951, + "kosh": 45233, + "kosher": 36502, + "koso": 23892, + "kosovo": 28343, + "kot": 23323, 
+ "kot": 20701, + "kota": 21735, + "koto": 40945, + "koto": 29977, + "kou": 18502, + "kou": 39614, + "kour": 34134, + "kov": 17733, + "kov": 15156, + "kova": 26185, + "koval": 47903, + "kovic": 16886, + "kovich": 44794, + "kovsky": 33384, + "kow": 29764, + "kow": 23919, + "kowski": 17649, + "koz": 29598, + "kp": 16174, + "kp": 16894, + "kpa": 38759, + "kph": 41138, + "kpk": 42094, + "kpmg": 38243, + "kpop": 29534, + "kpop": 15859, + "kprc": 47832, + "kprs": 46253, + "kr": 7309, + "kr": 14107, + "kra": 5762, + "kraft": 28057, + "kraja": 29016, + "kraken": 48408, + "krakow": 40033, + "kram": 19075, + "kramer": 27495, + "kran": 33243, + "kranti": 47969, + "krat": 30470, + "kre": 8362, + "kreme": 43140, + "kremlin": 33979, + "kri": 3679, + "kris": 35251, + "kris": 12261, + "krish": 11487, + "krishna": 15863, + "krishnan": 46535, + "krispy": 49292, + "krist": 16490, + "kristen": 28881, + "kristen": 16644, + "kristi": 26895, + "kristin": 35408, + "kristin": 26785, + "kristina": 33180, + "krit": 36265, + "kro": 16193, + "kroger": 36344, + "kron": 25999, + "kru": 10609, + "kruger": 32948, + "krun": 43084, + "kry": 13995, + "krystal": 36554, + "ks": 10470, + "ks": 662, + "ksa": 25439, + "ksh": 36594, + "kst": 17420, + "kstate": 48590, + "ksu": 43496, + "kswx": 36180, + "kt": 17238, + "kt": 7792, + "ktm": 33989, + "ktn": 42170, + "kton": 37848, + "kts": 48577, + "ktv": 36444, + "ku": 1836, + "ku": 4827, + "kuala": 30336, + "kubball": 48995, + "kuber": 41336, + "kubernetes": 45144, + "kubrick": 37032, + "kuch": 39394, + "kud": 40818, + "kudos": 14481, + "kul": 11325, + "kul": 31514, + "kum": 18086, + "kum": 28148, + "kuma": 43139, + "kuma": 33920, + "kumar": 22329, + "kumar": 7674, + "kumb": 31391, + "kun": 6849, + "kun": 21842, + "kung": 39656, + "kung": 22347, + "kunst": 37881, + "kup": 39023, + "kups": 27240, + "kur": 4862, + "kurdi": 23504, + "kurdish": 21644, + "kurdistan": 24459, + "kurds": 20888, + "kuri": 46375, + "kuro": 28239, + "kuro": 47826, + "kurt": 31903, + "kurt": 14527, + "kus": 27618, + "kus": 27505, + "kush": 22264, + "kush": 24594, + "kushner": 36716, + "kut": 17283, + "kut": 36965, + "kuwait": 19679, + "kuya": 34815, + "kuz": 33253, + "kv": 27594, + "kv": 34249, + "kw": 10072, + "kw": 18339, + "kwa": 32784, + "kwa": 48576, + "kwame": 46681, + "kwan": 37100, + "kwan": 39447, + "kwang": 40260, + "kwe": 26050, + "kwi": 35327, + "kwon": 36369, + "kx": 28190, + "kx": 46442, + "ky": 2018, + "ky": 2383, + "kya": 29142, + "kyc": 37758, + "kyiv": 36422, + "kyle": 15847, + "kyle": 7539, + "kylie": 28282, + "kylie": 17983, + "kyliejenner": 47232, + "kylo": 47704, + "kyo": 13150, + "kyo": 6281, + "kyoto": 23223, + "kyr": 26329, + "kyrgy": 40013, + "kyrgyz": 48346, + "kyrie": 21857, + "kyu": 28296, + "kyu": 25490, + "kyuhyun": 37229, + "kyung": 41058, + "kyungsoo": 30280, + "kywx": 39940, + "kz": 48743, + "kz": 36848, + "kzn": 38264, + "kö": 32437, + "l": 75, + "l": 331, + "la": 572, + "la": 1210, + "laa": 44642, + "lab": 3537, + "lab": 4352, + "labe": 25749, + "label": 12235, + "label": 9093, + "labeled": 32720, + "labeling": 36825, + "labelled": 45188, + "labels": 17413, + "lable": 31879, + "labor": 11201, + "labor": 7878, + "laboratories": 43421, + "laboratory": 17664, + "laborday": 39324, + "labou": 32700, + "labour": 19586, + "labour": 6019, + "labourdoorstep": 37008, + "labout": 35961, + "labra": 37067, + "labrador": 25409, + "labs": 12021, + "laby": 29131, + "labyrin": 31782, + "labyrinth": 35594, + "lac": 4477, + "lac": 16189, + "lace": 30012, + "lace": 5421, + "laced": 36800, + 
"laces": 23281, + "lacey": 31754, + "lach": 30558, + "lack": 24915, + "lack": 8069, + "lacking": 30080, + "lacks": 34388, + "laco": 45882, + "lacrosse": 12915, + "lacy": 38645, + "lad": 15991, + "lad": 10707, + "ladak": 42312, + "ladakh": 45295, + "ladder": 16637, + "ladders": 47125, + "lade": 26447, + "laden": 28634, + "ladi": 12934, + "ladies": 28932, + "ladies": 3431, + "lads": 9803, + "lady": 7275, + "lady": 2909, + "ladybird": 43389, + "ladybug": 40038, + "ladygaga": 21232, + "laf": 47555, + "lafayette": 22683, + "lag": 30932, + "lag": 20394, + "laga": 30161, + "lage": 24369, + "lager": 36811, + "lager": 22989, + "lagh": 37237, + "laghate": 47565, + "laghateparth": 48780, + "lagi": 39786, + "lago": 42698, + "lago": 31476, + "lagoon": 22753, + "lagos": 12728, + "lagun": 18500, + "laguna": 23609, + "lah": 27315, + "lah": 4299, + "lahat": 42164, + "lahore": 16733, + "lai": 23947, + "laid": 42560, + "laid": 11160, + "lain": 46958, + "lain": 17151, + "laine": 35860, + "lair": 31981, + "lais": 34923, + "lak": 12890, + "lak": 26793, + "lake": 6441, + "lake": 2553, + "lakedistrict": 26437, + "lakel": 26133, + "lakeland": 34306, + "laker": 45717, + "lakers": 13570, + "lakes": 9265, + "lakeshore": 42595, + "lakeside": 30915, + "lakewood": 36417, + "lakh": 21487, + "lakhs": 37985, + "lakings": 34289, + "lakota": 45510, + "laksh": 24937, + "lakshmi": 39682, + "lal": 12301, + "lal": 19430, + "lala": 33661, + "lali": 21726, + "laliga": 32383, + "lam": 2022, + "lam": 5704, + "lama": 26049, + "lamar": 28678, + "lamar": 17284, + "lamb": 19863, + "lamb": 10034, + "lambda": 36687, + "lambert": 14574, + "lambeth": 43410, + "lambo": 45464, + "lamborgh": 18709, + "lamborghini": 19462, + "lambs": 30361, + "lame": 23192, + "lamin": 22337, + "laminated": 49079, + "lamo": 41461, + "lamont": 46719, + "lamp": 26700, + "lamp": 10725, + "lampard": 39989, + "lamps": 23424, + "lan": 1193, + "lan": 4872, + "lana": 15406, + "lanapar": 47437, + "lanaparrilla": 47819, + "lanc": 11872, + "lanca": 15694, + "lancashire": 20939, + "lancaster": 16446, + "lance": 26025, + "lance": 11609, + "lancer": 38195, + "lancers": 46392, + "lancia": 48698, + "lancs": 47540, + "land": 1567, + "land": 973, + "lande": 36556, + "landed": 9873, + "lander": 37247, + "lander": 9666, + "landers": 20019, + "landfall": 38465, + "landfill": 34947, + "landia": 41384, + "landing": 8292, + "landings": 46104, + "landlord": 28938, + "landlords": 35283, + "landmark": 15208, + "landmarks": 30393, + "lando": 25463, + "lando": 7065, + "landon": 32748, + "landrover": 38125, + "landry": 36137, + "lands": 40223, + "lands": 2961, + "landsc": 4384, + "landscape": 21123, + "landscape": 5727, + "landscapephotography": 28125, + "landscapes": 15344, + "landscaping": 25642, + "landslide": 31954, + "lane": 25534, + "lane": 3980, + "lanes": 10345, + "laney": 38552, + "lang": 7969, + "lang": 8578, + "lange": 32021, + "langford": 45615, + "langley": 28595, + "langu": 4095, + "language": 46103, + "language": 4781, + "languages": 13527, + "lani": 22964, + "lanka": 16221, + "lankan": 40531, + "lannister": 49056, + "lans": 43550, + "lansing": 30805, + "lant": 44504, + "lanta": 44768, + "lantern": 17185, + "lanterns": 33676, + "lantic": 32601, + "lantic": 27678, + "lants": 38425, + "lanyard": 46808, + "lao": 32475, + "lao": 29521, + "laos": 34353, + "lap": 7213, + "lap": 8639, + "lapd": 32557, + "lapel": 47961, + "lapland": 43633, + "laps": 18711, + "lapse": 33365, + "laptop": 10464, + "laptops": 32189, + "laq": 45026, + "lar": 1592, + "lar": 1652, + "lara": 19435, + "lard": 
40347, + "lare": 22415, + "laredo": 48427, + "large": 40234, + "large": 3638, + "largely": 21418, + "larger": 12567, + "largest": 4960, + "largo": 44161, + "lari": 34676, + "lark": 43164, + "lark": 23536, + "larkin": 34769, + "larry": 18642, + "larry": 8242, + "lars": 8669, + "larsen": 39721, + "larson": 27973, + "larvae": 44840, + "las": 8295, + "las": 2552, + "lasag": 31210, + "lasagna": 40683, + "lasalle": 43866, + "laser": 25607, + "laser": 9885, + "lasers": 37060, + "lash": 31995, + "lash": 18480, + "lashes": 21015, + "lass": 24203, + "lass": 18263, + "lassic": 39430, + "last": 10600, + "last": 952, + "lasted": 25711, + "lasting": 13434, + "lastnight": 30159, + "lasts": 20141, + "lasvegas": 17789, + "lat": 1591, + "lat": 28437, + "lata": 47114, + "latam": 40012, + "late": 13267, + "late": 2325, + "latel": 49035, + "lately": 11824, + "latepost": 48328, + "later": 24109, + "later": 2941, + "lateral": 26646, + "latest": 46805, + "latest": 2053, + "latex": 27520, + "lati": 16357, + "latimes": 43356, + "latin": 16695, + "latin": 9888, + "latina": 27936, + "latino": 45734, + "latino": 19470, + "latinos": 40233, + "lation": 6191, + "latitude": 37392, + "lative": 15719, + "lator": 9291, + "lators": 28278, + "latt": 33561, + "latte": 17697, + "latter": 26198, + "latvia": 30034, + "lau": 1853, + "lau": 23090, + "lauderdale": 24352, + "laugh": 4969, + "laugh": 6332, + "laughed": 16746, + "laughing": 8301, + "laughs": 14322, + "laughter": 10722, + "laun": 2944, + "launch": 31168, + "launch": 2904, + "launched": 6125, + "launcher": 35782, + "launches": 7023, + "launching": 8565, + "laundering": 34079, + "laundry": 14797, + "laur": 15256, + "laura": 17091, + "laura": 7763, + "laure": 16932, + "laureate": 25675, + "laurel": 43370, + "laurel": 19942, + "lauren": 10456, + "lauren": 7634, + "laurence": 29353, + "laurent": 23226, + "laurie": 20326, + "laus": 38895, + "laus": 28111, + "lause": 22269, + "laut": 47688, + "lav": 13767, + "lav": 26919, + "lava": 16765, + "laven": 15047, + "lavender": 16033, + "laver": 28188, + "lavish": 35443, + "law": 2874, + "law": 2606, + "lawful": 33845, + "lawler": 47862, + "lawless": 39468, + "lawmaker": 37169, + "lawmakers": 21190, + "lawn": 31675, + "lawn": 11024, + "lawrence": 32221, + "lawrence": 8820, + "laws": 7306, + "lawson": 22152, + "lawsuit": 14346, + "lawsuits": 44331, + "lawyer": 10552, + "lawyers": 14232, + "lax": 17750, + "lax": 10024, + "lay": 7205, + "lay": 6360, + "laye": 25995, + "layer": 12411, + "layered": 28520, + "layers": 15900, + "laying": 12333, + "layla": 45050, + "layne": 48721, + "layo": 21738, + "layoffs": 29019, + "layout": 17314, + "lays": 19546, + "layton": 38061, + "laz": 18806, + "lazar": 33075, + "lazarus": 49126, + "laze": 41559, + "lazer": 43735, + "lazio": 33010, + "lazy": 32614, + "lazy": 10753, + "lb": 21958, + "lb": 7422, + "lbc": 37694, + "lbj": 45683, + "lbloggers": 48695, + "lbs": 8912, + "lc": 9584, + "lc": 7225, + "lcd": 21356, + "lcfc": 25339, + "lcs": 32279, + "ld": 1431, + "ld": 730, + "lder": 6945, + "lders": 43221, + "ldn": 37050, + "ldn": 2517, + "ldnont": 25827, + "ldnt": 21690, + "ldr": 37279, + "lds": 31235, + "le": 534, + "le": 579, + "lea": 2246, + "lea": 13324, + "leach": 35527, + "lead": 1328, + "lead": 2784, + "leader": 14806, + "leader": 3236, + "leaderboard": 34519, + "leaders": 3546, + "leadership": 36876, + "leadership": 3652, + "leading": 3833, + "leads": 5335, + "leaf": 9377, + "leaf": 7232, + "leaflet": 38289, + "leaflets": 39014, + "leafs": 16688, + "leafy": 42616, + "leagu": 13317, + "league": 16635, 
+ "league": 2313, + "leagueof": 26022, + "leagueoflegends": 31737, + "leagues": 19888, + "leah": 24350, + "leah": 19308, + "leak": 42900, + "leak": 15489, + "leaked": 14353, + "leaking": 34097, + "leaks": 15657, + "leam": 39606, + "lean": 12447, + "lean": 8208, + "leaning": 24411, + "leanne": 41448, + "leans": 9357, + "leap": 29129, + "leap": 15392, + "leaps": 48080, + "lear": 1146, + "lear": 27663, + "learn": 16959, + "learn": 1768, + "learned": 6048, + "learnenglish": 49040, + "learner": 33547, + "learners": 19572, + "learning": 22632, + "learning": 2378, + "learns": 17569, + "learnt": 18959, + "leary": 36051, + "lease": 49041, + "lease": 14394, + "leased": 48352, + "leash": 36192, + "leasing": 29160, + "least": 3651, + "leather": 21417, + "leather": 5862, + "leau": 26498, + "leav": 3198, + "leave": 37512, + "leave": 3258, + "leaves": 5579, + "leaving": 5216, + "leban": 9360, + "lebanese": 23819, + "lebanon": 11695, + "leblanc": 46381, + "lebo": 44184, + "lebron": 11971, + "lebu": 47030, + "lec": 944, + "lec": 35374, + "leche": 46197, + "lect": 45392, + "lection": 18252, + "lections": 30995, + "lecture": 6617, + "lecturer": 23795, + "lectures": 21118, + "led": 8767, + "led": 912, + "ledge": 23647, + "ledge": 4815, + "ledger": 26817, + "leds": 36763, + "lee": 6224, + "lee": 2592, + "leed": 16483, + "leed": 40206, + "leeds": 38900, + "leeds": 7420, + "leek": 34585, + "leeminho": 37831, + "leen": 35311, + "leen": 15940, + "leep": 48875, + "leep": 10191, + "lees": 29324, + "lees": 34056, + "lef": 9152, + "left": 33949, + "left": 1823, + "leftist": 35143, + "lefto": 17437, + "leftover": 26414, + "leftovers": 28481, + "lefty": 33935, + "leg": 1211, + "leg": 4924, + "lega": 38674, + "legacy": 44108, + "legacy": 6447, + "legal": 17743, + "legal": 3998, + "legalization": 40584, + "legalize": 42921, + "legally": 14152, + "legate": 46009, + "lege": 8065, + "legen": 6105, + "legend": 5480, + "legend": 3539, + "legendary": 6053, + "legendof": 47915, + "legends": 6396, + "leges": 15356, + "legg": 18474, + "legg": 32511, + "legged": 25830, + "leggings": 22895, + "leggo": 43441, + "legi": 11183, + "legion": 35503, + "legion": 14525, + "legis": 7200, + "legislat": 16486, + "legislation": 14143, + "legislative": 16755, + "legislators": 31572, + "legislature": 22309, + "legit": 12563, + "legitim": 17656, + "legitimate": 24491, + "lego": 28117, + "lego": 7849, + "legos": 45359, + "legs": 7072, + "leh": 19105, + "leh": 29298, + "lehead": 28090, + "lehigh": 34527, + "lehman": 46094, + "lei": 15828, + "lei": 21830, + "leia": 32723, + "leic": 35073, + "leica": 30206, + "leice": 10026, + "leicester": 28795, + "leicester": 11510, + "leicestershire": 45358, + "leigh": 14849, + "leigh": 9292, + "leighton": 30782, + "leila": 41342, + "lein": 20026, + "lein": 28551, + "leinster": 32242, + "leip": 36401, + "leipzig": 41860, + "leis": 13133, + "leisure": 15849, + "leit": 35446, + "leith": 34141, + "lek": 26626, + "lek": 36535, + "lel": 46623, + "lele": 26075, + "lem": 10213, + "lem": 8428, + "leman": 24478, + "lemans": 26694, + "lement": 9693, + "lements": 15833, + "lemme": 23318, + "lemon": 12272, + "lemon": 7184, + "lemonade": 18884, + "lemons": 29576, + "lemore": 41147, + "len": 3687, + "len": 2159, + "lena": 22038, + "lend": 45397, + "lend": 24987, + "lender": 44734, + "lenders": 42443, + "lending": 20209, + "lene": 17628, + "leness": 36551, + "leng": 7861, + "length": 10130, + "lengths": 31858, + "lengthy": 32624, + "lenin": 41760, + "lennon": 18360, + "lennox": 45748, + "lenny": 48448, + "lenny": 30124, + "leno": 
45357, + "lenovo": 25886, + "lens": 8666, + "lenses": 21264, + "lent": 20943, + "lent": 22605, + "lentil": 41511, + "lentils": 44269, + "leo": 24008, + "leo": 8312, + "leon": 6581, + "leon": 9763, + "leonard": 43849, + "leonard": 13142, + "leonardo": 20282, + "leone": 22864, + "leop": 11234, + "leopard": 15931, + "leopards": 40996, + "leopold": 45501, + "lep": 48884, + "leppard": 41656, + "lepre": 45641, + "ler": 5587, + "ler": 1803, + "lero": 15067, + "lerosis": 35455, + "leroy": 32441, + "lers": 6247, + "lery": 38184, + "les": 4339, + "les": 840, + "lesbian": 17419, + "lesbians": 43182, + "lesh": 32282, + "lesley": 25506, + "lesli": 13649, + "leslie": 16244, + "lesn": 39568, + "lesnar": 42223, + "less": 3242, + "less": 1285, + "lesser": 20369, + "lessly": 13103, + "lessness": 24847, + "lesson": 7714, + "lessons": 7199, + "lest": 24372, + "lest": 6794, + "lester": 23157, + "lester": 24023, + "lestwe": 29726, + "lestweforget": 30273, + "let": 1898, + "let": 1094, + "leta": 34319, + "lete": 34078, + "letes": 6815, + "leth": 30022, + "leth": 42462, + "lethal": 21905, + "lethbridge": 48390, + "leti": 34176, + "letics": 14504, + "letit": 46423, + "leto": 32203, + "leton": 37674, + "leton": 7462, + "lets": 10448, + "lets": 3243, + "letsgo": 16967, + "letsgo": 29789, + "letstalk": 35591, + "lett": 22428, + "lett": 9778, + "lette": 41798, + "lette": 10301, + "letter": 15567, + "letter": 4861, + "lettering": 26382, + "letterman": 38447, + "letters": 9181, + "letting": 9510, + "letto": 35449, + "lettu": 17933, + "lettuce": 18573, + "leu": 15691, + "leuke": 31031, + "leukemia": 32097, + "leum": 21571, + "leur": 45806, + "lev": 17022, + "lev": 29950, + "levan": 42543, + "leve": 36271, + "level": 21682, + "level": 2931, + "leveled": 48453, + "levels": 6295, + "leven": 44792, + "leven": 34729, + "lever": 20178, + "lever": 23094, + "leverage": 24030, + "leveraging": 37948, + "levi": 25630, + "levi": 19113, + "leviathan": 41736, + "levin": 36949, + "levine": 26594, + "levit": 22715, + "levy": 17147, + "lew": 5063, + "lew": 25329, + "lewan": 48349, + "lewd": 45241, + "lewes": 40431, + "lewi": 19589, + "lewis": 22043, + "lewis": 6020, + "lewisham": 37385, + "lewisham": 47633, + "lewishamilton": 42960, + "lewood": 37951, + "lex": 6586, + "lex": 9658, + "lexa": 48259, + "lexi": 44231, + "lexi": 24679, + "lexington": 22308, + "lexus": 20694, + "ley": 2565, + "ley": 1066, + "leye": 37061, + "leys": 45609, + "leys": 14834, + "leyton": 46573, + "lez": 26442, + "lf": 33960, + "lf": 22078, + "lfc": 37826, + "lfc": 8267, + "lfw": 28514, + "lg": 4546, + "lg": 11368, + "lga": 39348, + "lgb": 25401, + "lgbt": 11743, + "lgbt": 9592, + "lgbti": 42730, + "lgbtq": 47625, + "lgbtq": 14939, + "lgm": 39389, + "lh": 27794, + "lh": 31159, + "lhp": 45092, + "lhs": 33170, + "li": 554, + "li": 4250, + "lia": 26118, + "lia": 6964, + "liability": 29139, + "liaison": 39294, + "liam": 5258, + "liam": 7167, + "lian": 18058, + "liance": 40864, + "liar": 16334, + "liars": 23863, + "lias": 46021, + "lib": 10249, + "lib": 13345, + "libby": 36832, + "libdems": 40869, + "liber": 3425, + "liberal": 48032, + "liberal": 9985, + "liberalism": 40018, + "liberals": 15981, + "liberated": 38690, + "liberation": 19507, + "liberia": 32208, + "libertarian": 35067, + "liberties": 48623, + "liberty": 23397, + "liberty": 8480, + "libr": 2856, + "libra": 43038, + "librarian": 25148, + "librarians": 37806, + "libraries": 14277, + "library": 25713, + "library": 3519, + "libre": 49210, + "libre": 31681, + "libs": 26401, + "liby": 36390, + "libya": 16417, + 
"libyan": 42319, + "lic": 2508, + "lic": 3376, + "lice": 45691, + "licen": 6706, + "licence": 20550, + "license": 10337, + "licensed": 18752, + "licenses": 36414, + "licensing": 24219, + "lich": 23979, + "lich": 25875, + "lick": 29197, + "lick": 17541, + "licking": 33013, + "licks": 42117, + "lics": 44552, + "lid": 39369, + "lid": 17678, + "lidge": 45558, + "lido": 35683, + "lids": 41609, + "lie": 6570, + "lie": 2538, + "lieb": 45387, + "liebe": 37749, + "lied": 6486, + "lief": 38428, + "lien": 45716, + "lier": 3626, + "liers": 19303, + "lies": 37236, + "lies": 3205, + "liest": 14020, + "liet": 41107, + "lieu": 20401, + "lieu": 35313, + "lieutenant": 22538, + "lif": 16456, + "life": 2666, + "life": 970, + "lifeat": 27801, + "lifeboat": 37404, + "lifecycle": 49171, + "lifein": 48447, + "lifeis": 24824, + "lifeisgood": 46433, + "lifel": 15025, + "lifeline": 38438, + "lifelong": 21358, + "lifeof": 36061, + "lifesaving": 48016, + "lifespan": 49257, + "lifestyle": 46512, + "lifestyle": 7037, + "lifestyles": 48521, + "lifetime": 48737, + "lifetime": 9107, + "liff": 34404, + "liffe": 38942, + "lift": 33146, + "lift": 6779, + "lifted": 16783, + "lifter": 38555, + "lifting": 10857, + "lifts": 18291, + "lig": 19915, + "lig": 38493, + "liga": 16802, + "ligam": 31077, + "ligament": 48705, + "ligan": 27962, + "ligans": 42133, + "ligh": 7510, + "light": 3885, + "light": 1395, + "lighted": 18404, + "lighten": 32717, + "lightening": 28170, + "lighter": 14102, + "lighthouse": 13717, + "lighting": 5799, + "lightly": 26878, + "lightning": 7756, + "lightroom": 41454, + "lights": 3073, + "lightweight": 16278, + "ligu": 42920, + "ligue": 29196, + "lik": 4831, + "lik": 18495, + "like": 9175, + "like": 789, + "liked": 7112, + "likefor": 48444, + "likeli": 40666, + "likelihood": 48158, + "likely": 5256, + "liken": 36084, + "likes": 4724, + "liking": 16810, + "lil": 6012, + "lil": 4461, + "lilac": 33647, + "lili": 26686, + "lili": 48411, + "lilies": 38110, + "lillard": 47016, + "lille": 38705, + "lilli": 40920, + "lillian": 41563, + "lilly": 47825, + "lilly": 21815, + "lily": 23803, + "lily": 10647, + "lim": 2377, + "lim": 17204, + "lima": 17589, + "limb": 27061, + "limb": 32363, + "limbo": 46179, + "limbs": 34886, + "lime": 17385, + "lime": 11193, + "limel": 48658, + "limer": 16915, + "limerick": 19501, + "limestone": 27272, + "limit": 18933, + "limit": 9973, + "limitations": 32730, + "limited": 49229, + "limited": 3472, + "limiting": 35812, + "limitless": 35833, + "limits": 11966, + "limo": 33166, + "limous": 47287, + "limpopo": 47175, + "lin": 1254, + "lin": 2424, + "lina": 26110, + "lincol": 6239, + "lincoln": 16957, + "lincoln": 7454, + "lincolnshire": 29014, + "lind": 6492, + "linda": 45410, + "linda": 10760, + "linden": 44076, + "linden": 34832, + "lindo": 38467, + "lindsay": 29846, + "lindsay": 16858, + "lindsey": 29475, + "lindsey": 18128, + "line": 3674, + "line": 1148, + "linear": 19816, + "linebacker": 29848, + "lined": 11842, + "lineman": 31501, + "linen": 20032, + "liner": 11618, + "liners": 24463, + "lines": 3418, + "liness": 28633, + "lineup": 7316, + "lineups": 33589, + "ling": 4851, + "ling": 1358, + "linger": 29593, + "lingerie": 18473, + "lingering": 46494, + "lings": 11390, + "lington": 27673, + "lington": 9002, + "lingu": 34449, + "lingui": 29942, + "linguistic": 46847, + "linguistics": 48651, + "lining": 11589, + "link": 18433, + "link": 2468, + "linke": 15088, + "linked": 11059, + "linkedin": 16302, + "linkin": 40287, + "linkin": 49291, + "linking": 23296, + "links": 8113, + "linn": 37431, + 
"lino": 41189, + "lino": 34995, + "lins": 6567, + "linson": 15401, + "linton": 36479, + "linus": 49303, + "linux": 14061, + "lio": 19395, + "lion": 8872, + "lion": 5567, + "lionel": 19441, + "lions": 7093, + "lip": 8630, + "lip": 8546, + "lipo": 38795, + "lipp": 38074, + "lips": 8847, + "lipse": 10351, + "lipstick": 15618, + "liqu": 6310, + "lique": 32680, + "liqueur": 43612, + "liqui": 33817, + "liquid": 18366, + "liquid": 10158, + "liquidity": 42812, + "liquor": 17828, + "lis": 7297, + "lis": 12749, + "lisa": 25236, + "lisa": 7424, + "lisam": 43072, + "lisboa": 40052, + "lisbon": 17708, + "lish": 12658, + "lish": 2354, + "lished": 22620, + "lisle": 21529, + "lism": 34390, + "liss": 45489, + "liss": 35433, + "lisse": 49309, + "list": 1734, + "list": 1998, + "lista": 37812, + "listed": 6457, + "listen": 17454, + "listen": 2672, + "listened": 15347, + "listener": 34819, + "listeners": 26901, + "listening": 3656, + "listens": 25912, + "lister": 45109, + "listing": 8145, + "listings": 21987, + "liston": 48041, + "lists": 12281, + "lit": 2213, + "lit": 4350, + "lita": 30100, + "lite": 29273, + "lite": 13694, + "litecoin": 39063, + "liter": 3085, + "liter": 34904, + "literacy": 12841, + "literal": 24269, + "literally": 4719, + "literary": 13586, + "literature": 11072, + "litfest": 40369, + "lith": 37005, + "lithium": 22794, + "litho": 31088, + "lithograph": 49022, + "lithu": 21045, + "lithuania": 27068, + "liti": 24292, + "litigation": 31769, + "lito": 47381, + "litre": 25786, + "litres": 39919, + "litt": 1216, + "litt": 47583, + "litter": 45431, + "litter": 17118, + "litters": 45300, + "little": 7024, + "little": 1274, + "littlemix": 29731, + "littlest": 48969, + "litur": 36830, + "litz": 30357, + "liu": 20466, + "liv": 13895, + "liv": 19901, + "livan": 12785, + "live": 3215, + "live": 1064, + "lived": 8867, + "livel": 17973, + "liveli": 26566, + "livelihood": 46497, + "livelihoods": 47716, + "lively": 19663, + "liveme": 35396, + "livemusic": 15688, + "liven": 41057, + "liveon": 22815, + "livepd": 38742, + "livepd": 31899, + "liver": 4755, + "liver": 12639, + "liverpool": 29778, + "liverpool": 5366, + "livery": 23248, + "lives": 3247, + "livesmatter": 20348, + "livestock": 22079, + "livestream": 16844, + "livetweet": 38546, + "livin": 28061, + "living": 10965, + "living": 2815, + "livingston": 30551, + "lix": 45068, + "liz": 8632, + "liz": 12242, + "liza": 28787, + "lizard": 17221, + "lizards": 41991, + "lizasober": 44487, + "lizasoberano": 45076, + "lizz": 34430, + "lizzie": 29530, + "lizzy": 32306, + "lj": 34211, + "lj": 32273, + "lju": 44562, + "lk": 39110, + "lk": 26596, + "lka": 21881, + "ll": 1657, + "ll": 865, + "lla": 15419, + "llama": 36679, + "llan": 17281, + "llan": 38728, + "lland": 31150, + "llc": 17161, + "lle": 26550, + "lle": 29732, + "llen": 41197, + "ller": 7722, + "llers": 26426, + "lli": 47015, + "lli": 13368, + "llis": 25518, + "lll": 27177, + "llll": 34874, + "llll": 43485, + "llo": 19293, + "lloy": 10092, + "lloyd": 33339, + "lloyd": 12400, + "llp": 28042, + "lls": 40535, + "lly": 26379, + "lm": 6981, + "lm": 15282, + "lma": 4493, + "lmao": 5121, + "lmaoo": 32623, + "lmaooo": 33362, + "lmaoooo": 45232, + "lmfa": 8928, + "lmfao": 11068, + "lmfaooo": 47658, + "lmp": 43575, + "lms": 30381, + "ln": 31644, + "ln": 18654, + "lng": 22339, + "lnp": 39679, + "lo": 549, + "lo": 2982, + "loa": 39678, + "load": 4515, + "load": 2834, + "loaded": 6756, + "loader": 28492, + "loading": 9975, + "loads": 8691, + "loaf": 26467, + "loaf": 18273, + "loan": 28431, + "loan": 8176, + "loans": 
14206, + "lob": 11197, + "lob": 46606, + "lobal": 34574, + "lobb": 27698, + "lobby": 12449, + "lobbying": 36047, + "lobe": 46325, + "lobes": 24148, + "lobo": 39323, + "lobos": 36586, + "lobster": 13793, + "loc": 1378, + "loc": 25826, + "local": 9202, + "local": 2029, + "localized": 49399, + "locally": 15603, + "locals": 15041, + "locate": 20490, + "located": 5677, + "location": 4372, + "locations": 9580, + "loch": 20188, + "loch": 14101, + "lock": 7201, + "lock": 4381, + "lockdown": 35636, + "locke": 29698, + "locked": 8371, + "locker": 14053, + "lockhart": 48642, + "lockheed": 36637, + "locking": 19978, + "locks": 13212, + "lockscreen": 42439, + "loco": 25555, + "locom": 22798, + "locomo": 46147, + "locomotive": 30439, + "locu": 33635, + "locust": 46237, + "lod": 45650, + "lodge": 10504, + "loe": 30113, + "loe": 25484, + "loeb": 49334, + "lof": 15011, + "loff": 31008, + "loft": 35707, + "loft": 20049, + "loftus": 46689, + "log": 3239, + "log": 7383, + "logan": 20655, + "logan": 10569, + "logans": 40752, + "logg": 43002, + "logged": 31457, + "logger": 39089, + "logging": 24444, + "logi": 3177, + "logia": 48031, + "logic": 10670, + "logical": 4791, + "logically": 24782, + "logie": 33445, + "logies": 7378, + "login": 31121, + "logist": 7407, + "logistics": 14755, + "logists": 12233, + "logne": 19911, + "logo": 31480, + "logo": 5750, + "logos": 24879, + "logs": 22745, + "logue": 27785, + "logy": 22721, + "logy": 1659, + "loh": 49129, + "loh": 37983, + "loi": 35128, + "loid": 31408, + "loin": 21760, + "loire": 46040, + "lois": 27040, + "lok": 19908, + "lok": 23575, + "loki": 24435, + "lol": 10721, + "lol": 1824, + "lola": 19065, + "lolita": 42615, + "lolla": 45483, + "lolli": 27906, + "lollipop": 34605, + "lolly": 48264, + "lolo": 16895, + "lolo": 37481, + "lolol": 25280, + "lololol": 34738, + "lolz": 35260, + "lom": 9279, + "loma": 42889, + "lombar": 25493, + "lombard": 46461, + "lombardi": 44346, + "lomond": 48941, + "lon": 1235, + "lon": 6507, + "london": 6835, + "london": 1789, + "londonmarathon": 35018, + "lone": 22220, + "lone": 13576, + "lonel": 28872, + "loneliness": 30310, + "lonely": 34509, + "lonely": 12368, + "lonelyplanet": 44984, + "long": 4792, + "long": 1538, + "longe": 25793, + "longer": 5349, + "longest": 10731, + "longevity": 35354, + "longh": 20286, + "longhorn": 41047, + "longhorns": 38295, + "longing": 38482, + "longlive": 47840, + "longs": 43618, + "longtime": 19685, + "loo": 731, + "loo": 11804, + "look": 8874, + "look": 1012, + "lookalike": 38307, + "lookbook": 39184, + "looked": 4913, + "lookin": 11254, + "looking": 36898, + "looking": 1312, + "lookout": 18330, + "looks": 1606, + "lool": 33125, + "loom": 37440, + "loom": 17199, + "looming": 35384, + "looms": 30550, + "loon": 28222, + "loona": 48137, + "looney": 45315, + "looo": 20902, + "loool": 36016, + "looool": 47038, + "looooo": 31484, + "loop": 19606, + "loop": 10408, + "loops": 21625, + "loos": 45723, + "loose": 43815, + "loose": 9786, + "loot": 21518, + "lop": 36734, + "lop": 17066, + "lopes": 49269, + "lopez": 12982, + "lor": 2179, + "lor": 11335, + "lord": 18896, + "lord": 3486, + "lorde": 35483, + "lords": 14969, + "lore": 12880, + "lore": 27218, + "loren": 13602, + "loren": 33398, + "lorenzo": 21342, + "lores": 34510, + "loretta": 40863, + "lori": 20164, + "lori": 23095, + "lorna": 46316, + "lorraine": 27602, + "lorry": 31354, + "los": 32217, + "los": 3087, + "losange": 14037, + "losangeles": 14638, + "lose": 43318, + "lose": 5354, + "loser": 18168, + "losers": 23201, + "loses": 14263, + "losing": 7918, + 
"loss": 34761, + "loss": 4327, + "losses": 16909, + "lost": 14258, + "lost": 2624, + "lostdog": 48482, + "lot": 5132, + "lot": 1954, + "loth": 43625, + "lothian": 31360, + "lothing": 42058, + "lotion": 25260, + "lotr": 34165, + "lots": 2958, + "lott": 42854, + "lotta": 29125, + "lotte": 16535, + "lotte": 7274, + "lottery": 16975, + "lottie": 48517, + "lotto": 28265, + "lotus": 13824, + "lou": 2207, + "lou": 9745, + "loubout": 38369, + "loud": 22884, + "loud": 7464, + "louder": 25904, + "loudest": 49214, + "loudly": 39256, + "lough": 21927, + "lough": 28045, + "loughborough": 49153, + "loui": 42173, + "louie": 25790, + "louis": 8916, + "louis": 4459, + "louisa": 40011, + "louise": 32275, + "louise": 13076, + "louisi": 12187, + "louisiana": 12946, + "louisville": 13860, + "louisvuitton": 44911, + "loun": 6466, + "lounge": 7141, + "lounging": 45430, + "lour": 29383, + "lourdes": 45071, + "louvre": 36995, + "lov": 8923, + "lov": 21229, + "lova": 37394, + "lovable": 38565, + "lovato": 18960, + "love": 2618, + "love": 793, + "lovecraft": 42405, + "loved": 3249, + "lovefl": 38884, + "loveher": 38306, + "lovehim": 45733, + "loveis": 30931, + "loveisland": 30970, + "loveislove": 43603, + "loveit": 24764, + "lovel": 8999, + "lovelies": 31412, + "lovelondon": 46493, + "lovely": 33250, + "lovely": 2165, + "lovemy": 20041, + "lovemyjob": 40130, + "loven": 33754, + "lover": 28508, + "lover": 7168, + "lovers": 48416, + "lovers": 5973, + "loves": 37773, + "loves": 3925, + "lovethe": 33040, + "lovethem": 48298, + "lovett": 47095, + "lovewins": 47687, + "loveyou": 39226, + "loveyou": 25964, + "loveyour": 26462, + "lovin": 33442, + "lovin": 16354, + "loving": 29568, + "loving": 3721, + "lovingly": 44100, + "low": 1049, + "low": 1042, + "loway": 16104, + "lowe": 17910, + "lowed": 22733, + "lowell": 24458, + "lower": 32578, + "lower": 4909, + "lowered": 34968, + "lowering": 35261, + "lowers": 36398, + "lowes": 38515, + "lowest": 12098, + "lowing": 8283, + "lowkey": 29481, + "lowry": 27444, + "lows": 4406, + "lox": 41725, + "loy": 4519, + "loy": 23929, + "loyal": 13032, + "loyalty": 14686, + "loyd": 44212, + "loyed": 29279, + "loyment": 18307, + "loyola": 32569, + "lp": 22282, + "lp": 6392, + "lpc": 44092, + "lpg": 47905, + "lpga": 34295, + "lps": 32094, + "lr": 20572, + "lr": 7041, + "lrt": 32996, + "ls": 19051, + "ls": 1268, + "lsd": 43766, + "lse": 46127, + "lse": 43886, + "lsu": 35428, + "lsu": 15672, + "lt": 13642, + "lt": 3333, + "ltc": 27664, + "ltd": 6802, + "lte": 25202, + "lton": 14237, + "lu": 664, + "lu": 9657, + "lub": 22469, + "lub": 11836, + "lubbock": 37660, + "lubric": 40963, + "luc": 7013, + "luc": 28014, + "luca": 21053, + "lucas": 23425, + "lucas": 10225, + "lucci": 45849, + "luce": 46217, + "lucent": 41552, + "lucer": 36042, + "luch": 36646, + "lucha": 38449, + "luci": 8787, + "lucia": 22290, + "luciano": 46365, + "lucid": 44540, + "lucie": 39461, + "lucifer": 46224, + "lucifer": 27687, + "lucille": 47454, + "lucin": 27523, + "luck": 9647, + "luck": 2820, + "luckiest": 42469, + "luckily": 20100, + "lucknow": 29407, + "lucky": 20495, + "lucky": 4133, + "lucrative": 41485, + "lucy": 17262, + "lucy": 10120, + "lud": 14288, + "lude": 28755, + "ludo": 40141, + "ludwig": 30633, + "lue": 45199, + "luf": 25264, + "lufc": 17818, + "luffy": 39047, + "lufthan": 37769, + "lufthansa": 39145, + "lug": 45521, + "lugg": 19673, + "luggage": 20138, + "luhan": 20975, + "luigi": 28444, + "luis": 25231, + "luis": 11339, + "luiz": 39633, + "lujah": 31639, + "luk": 21652, + "luka": 34878, + "lukaku": 37177, + 
"lukas": 37941, + "luke": 11970, + "luke": 5652, + "lul": 20861, + "lulla": 37019, + "lullaby": 41676, + "lulu": 32052, + "lulu": 26935, + "lum": 18112, + "lum": 5997, + "lumb": 36231, + "lumber": 27421, + "lumber": 34692, + "lumi": 41437, + "lumia": 31912, + "lumin": 15867, + "luminous": 37913, + "lump": 38704, + "lumpur": 34411, + "lun": 3221, + "lun": 49390, + "luna": 14425, + "lunar": 16043, + "lunatic": 45874, + "lunch": 10954, + "lunch": 2772, + "luncheon": 15104, + "lunches": 29705, + "lunchtime": 14330, + "lund": 30975, + "lund": 20181, + "lunes": 35648, + "lung": 38479, + "lung": 16271, + "lungs": 27366, + "lup": 27413, + "lupita": 49352, + "lupus": 36017, + "lur": 14439, + "lure": 31376, + "lures": 46747, + "lurking": 29941, + "lus": 7158, + "lusci": 38004, + "luscious": 39935, + "lush": 40382, + "lush": 16263, + "lust": 42071, + "lust": 12662, + "lustre": 46673, + "luther": 21848, + "luther": 17208, + "lutheran": 27341, + "luton": 28288, + "luv": 24726, + "luv": 8502, + "lux": 3439, + "lux": 16704, + "luxe": 26373, + "luxemb": 21314, + "luxembour": 22712, + "luxembourg": 23949, + "luxu": 16112, + "luxurious": 17292, + "luxury": 12083, + "luxury": 5247, + "luxurytravel": 29010, + "luz": 41008, + "lv": 10862, + "lv": 11184, + "lvl": 31256, + "lw": 40515, + "lw": 35115, + "lx": 30789, + "ly": 1251, + "ly": 597, + "lydia": 24316, + "lyf": 43688, + "lyfe": 30787, + "lyft": 32944, + "lying": 7175, + "lyk": 46376, + "lyle": 36828, + "lym": 20087, + "lyme": 31167, + "lymph": 30073, + "lymphoma": 37648, + "lyn": 3957, + "lyn": 5054, + "lynch": 31586, + "lynch": 13560, + "lynd": 33416, + "lynda": 42959, + "lyndon": 48518, + "lynn": 25303, + "lynn": 10667, + "lynne": 26900, + "lynx": 28941, + "lyon": 17176, + "lyons": 29453, + "lyric": 24366, + "lyric": 21291, + "lyrical": 33358, + "lyricist": 49013, + "lyrics": 9551, + "lyrix": 46814, + "lys": 45054, + "lyte": 40059, + "lywood": 4012, + "lz": 30818, + "lé": 39641, + "m": 76, + "m": 332, + "ma": 577, + "ma": 1226, + "maa": 42774, + "maa": 21555, + "maan": 33668, + "maar": 48927, + "maas": 43332, + "mab": 35639, + "mabel": 47319, + "mable": 23001, + "mably": 40082, + "mabu": 44682, + "mac": 1961, + "mac": 4945, + "macar": 21558, + "macaroni": 41824, + "macarthur": 36785, + "macau": 43984, + "macau": 33370, + "macbeth": 36321, + "macbook": 20617, + "macdonald": 20315, + "mace": 44869, + "maced": 21102, + "macedonia": 27071, + "macfar": 45374, + "macfarlane": 48825, + "mach": 2637, + "mach": 35091, + "machado": 42318, + "mache": 43220, + "macher": 29330, + "machi": 41783, + "machin": 17972, + "machine": 11539, + "machine": 4169, + "machinelearning": 13621, + "machinery": 21858, + "machines": 11108, + "machining": 45562, + "macho": 43977, + "macht": 45225, + "macin": 36533, + "mack": 8590, + "mack": 12145, + "mackay": 32497, + "macken": 48057, + "mackenzie": 22351, + "mackerel": 35002, + "mackin": 26010, + "macklemore": 41758, + "macle": 33843, + "maclean": 47137, + "macleod": 43684, + "macmillan": 36364, + "macmillan": 35191, + "macon": 35818, + "macos": 45469, + "macqu": 38365, + "macquarie": 40858, + "macro": 20891, + "macro": 16626, + "macron": 24859, + "macs": 46548, + "macy": 17113, + "macys": 47652, + "mad": 2740, + "mad": 3843, + "mada": 37799, + "madagas": 24758, + "madagascar": 25744, + "madam": 33634, + "madam": 27538, + "madame": 23507, + "madd": 31717, + "madden": 19093, + "maddie": 39959, + "maddie": 18875, + "maddow": 32644, + "maddy": 31734, + "made": 5388, + "made": 1105, + "madein": 13670, + "madeira": 33810, + "madel": 
34532, + "madele": 29831, + "madeleine": 33264, + "madeline": 33905, + "madewith": 28627, + "madewithunity": 43190, + "madhu": 23000, + "madhuri": 38346, + "madhuridixit": 43889, + "madhya": 48302, + "madi": 6527, + "madi": 27282, + "madison": 24798, + "madison": 8791, + "madmen": 45452, + "madness": 8755, + "madon": 44852, + "madonna": 14137, + "madra": 27416, + "madras": 42046, + "madre": 42130, + "madri": 5529, + "madrid": 5909, + "mads": 41201, + "madu": 34913, + "madurai": 49159, + "maduro": 32912, + "mae": 16898, + "mae": 17339, + "maer": 47088, + "maestro": 24140, + "mafi": 47164, + "mafia": 14890, + "mag": 1191, + "mag": 4508, + "maga": 8694, + "magaz": 2974, + "magazine": 3113, + "magazines": 22253, + "magdal": 29673, + "mage": 46568, + "mage": 10923, + "magee": 43872, + "magenta": 38091, + "magento": 42442, + "mages": 31059, + "maggi": 29611, + "maggie": 41443, + "maggie": 14524, + "maggio": 49087, + "magh": 45555, + "magi": 19270, + "magic": 13061, + "magic": 3778, + "magical": 36408, + "magical": 7823, + "magician": 26368, + "magin": 42678, + "maging": 41310, + "magn": 10290, + "magna": 34076, + "magne": 9921, + "magnesium": 36379, + "magnet": 18240, + "magnetic": 13838, + "magnets": 33030, + "magni": 24297, + "magnific": 9725, + "magnificent": 10724, + "magnitude": 22955, + "magno": 21184, + "magnolia": 27123, + "magnu": 45198, + "magnum": 23496, + "magnus": 26275, + "magpie": 45973, + "mags": 31021, + "maguire": 26470, + "mah": 7206, + "mah": 10801, + "maha": 12237, + "maha": 33983, + "mahal": 22301, + "mahan": 45191, + "mahar": 11635, + "maharaj": 38488, + "maharashtra": 19328, + "mahat": 32434, + "mahatma": 40530, + "mahe": 15756, + "maher": 29826, + "mahesh": 33448, + "mahesh": 22095, + "mahi": 32529, + "mahi": 38659, + "mahin": 24113, + "mahindra": 31285, + "mahmoud": 41361, + "mahog": 30804, + "mahogany": 33084, + "mahon": 45864, + "mahon": 20371, + "mahone": 26634, + "mai": 7138, + "mai": 14595, + "maia": 46585, + "maid": 23148, + "maid": 10226, + "maidan": 37346, + "maiden": 37011, + "maiden": 13809, + "maids": 27305, + "maidstone": 44395, + "mail": 10478, + "mail": 2614, + "mailbox": 31482, + "mailed": 42314, + "mailing": 26680, + "mailonline": 26021, + "mails": 45213, + "main": 3904, + "main": 2623, + "maine": 18639, + "maine": 7836, + "mained": 15609, + "mainedcm": 15845, + "mainland": 27629, + "mainly": 15280, + "mains": 33656, + "mainst": 42102, + "mainstream": 18034, + "maintain": 12954, + "maintained": 26665, + "maintaining": 21964, + "maintains": 38335, + "mainten": 9399, + "maintenance": 9610, + "mais": 28153, + "maisie": 47355, + "maison": 37065, + "maison": 27626, + "mait": 26387, + "maize": 35386, + "maj": 2948, + "maj": 28723, + "maja": 47498, + "maje": 9852, + "majestic": 15335, + "majesty": 21188, + "major": 8008, + "major": 3350, + "majority": 10508, + "majors": 23597, + "mak": 11271, + "mak": 19253, + "makar": 42242, + "makati": 39402, + "make": 3232, + "make": 1078, + "makeaw": 45859, + "makeinindia": 42739, + "makeit": 26308, + "maken": 47093, + "makeover": 17926, + "maker": 15196, + "maker": 4836, + "makers": 6577, + "makerspace": 42400, + "makes": 2088, + "makeshift": 43274, + "makeu": 41707, + "makeup": 26402, + "makeup": 5853, + "makeyourown": 34090, + "makeyourownlane": 34823, + "maki": 34514, + "makin": 43096, + "makin": 22407, + "making": 17976, + "making": 1665, + "makk": 39852, + "maknae": 44118, + "mako": 49061, + "mal": 1662, + "mal": 3796, + "mala": 28290, + "malade": 36928, + "malaga": 35395, + "malala": 41137, + "malam": 48956, + 
"malaria": 24929, + "malawi": 23405, + "malay": 5323, + "malay": 42430, + "malayalam": 34860, + "malaysi": 39668, + "malaysia": 8146, + "malaysian": 21136, + "malbec": 47741, + "malcol": 12645, + "malcolm": 14139, + "maldives": 16795, + "male": 11326, + "male": 2801, + "males": 14426, + "malhotra": 28866, + "mali": 6701, + "mali": 22669, + "malia": 46714, + "malibu": 21723, + "malicious": 42147, + "malign": 41122, + "malik": 11394, + "mall": 10984, + "mall": 6220, + "mallorca": 28082, + "mallory": 38968, + "malls": 36447, + "malm": 44071, + "malnutrition": 41153, + "malo": 43518, + "malone": 19852, + "maloney": 45897, + "mals": 25370, + "malt": 21688, + "malta": 16989, + "maltese": 39838, + "malvern": 39356, + "malware": 24153, + "mam": 4404, + "mam": 17778, + "mama": 7133, + "mamamoo": 36012, + "mamas": 42395, + "mamba": 44189, + "mament": 45690, + "mami": 43858, + "mamma": 34893, + "mammal": 33385, + "mammals": 31987, + "mammoth": 28022, + "man": 723, + "man": 786, + "mana": 29467, + "mana": 15837, + "manafort": 40108, + "manag": 1830, + "manage": 9770, + "managed": 7928, + "management": 3319, + "manager": 3898, + "managerial": 44261, + "managers": 12853, + "manages": 29699, + "managing": 10892, + "manas": 44188, + "manatee": 46558, + "mance": 2324, + "manchester": 24424, + "manchester": 4651, + "mancini": 47681, + "mancity": 31538, + "mancrush": 36945, + "mancrushmonday": 39307, + "mand": 4325, + "mand": 27244, + "mandala": 41106, + "mandarin": 26455, + "mandate": 26228, + "mandatory": 19934, + "mandel": 34960, + "mandela": 16280, + "mandi": 38961, + "mandir": 35815, + "mando": 34006, + "mands": 12340, + "mandu": 31440, + "mandy": 41505, + "mandy": 24302, + "mane": 44471, + "mane": 16044, + "maneu": 33216, + "mang": 25616, + "mang": 31096, + "manga": 11873, + "mangal": 43027, + "manger": 48251, + "mango": 43831, + "mango": 13962, + "mangrove": 47180, + "manhatt": 10152, + "manhattan": 10961, + "mani": 5654, + "mani": 10718, + "mania": 8435, + "maniac": 31814, + "maniacs": 41444, + "manian": 40077, + "manic": 23017, + "manic": 37825, + "manicure": 33637, + "manife": 14379, + "manifest": 34422, + "manifestation": 48348, + "manifesto": 20907, + "manil": 38827, + "manila": 10969, + "manipu": 40261, + "manipul": 19237, + "manipulation": 30277, + "manipur": 47757, + "manish": 41759, + "manish": 44720, + "manit": 15693, + "manitoba": 20342, + "manjaro": 41489, + "mankind": 24155, + "manly": 25194, + "mann": 19396, + "mann": 4783, + "manne": 30160, + "manned": 26139, + "mannequin": 43388, + "manner": 20700, + "manners": 31693, + "manning": 15996, + "manny": 37054, + "manny": 20933, + "mano": 15753, + "mano": 24016, + "manoj": 41146, + "manor": 41830, + "manor": 13614, + "mans": 28422, + "mans": 7746, + "mansfield": 25543, + "manship": 15460, + "mansion": 13404, + "manslaughter": 48632, + "manson": 26715, + "mant": 25122, + "mant": 27037, + "manta": 41431, + "mantis": 39946, + "mantle": 22159, + "mantra": 25162, + "manu": 3404, + "manu": 25799, + "manual": 12268, + "manuel": 29171, + "manuel": 9567, + "manufac": 5105, + "manufacture": 27741, + "manufactured": 24010, + "manufacturer": 15668, + "manufacturers": 18763, + "manufacturing": 8386, + "manure": 47907, + "manus": 28181, + "manuscript": 24365, + "manuscripts": 40765, + "manutd": 20994, + "many": 28484, + "many": 1346, + "manziel": 40637, + "mao": 47447, + "mao": 25605, + "maori": 43400, + "map": 25180, + "map": 3923, + "maple": 21980, + "maple": 10570, + "mapleleafs": 41257, + "mapoli": 28768, + "mapp": 36894, + "mapped": 41596, + 
"mapping": 15231, + "maps": 8765, + "mapu": 42082, + "mar": 675, + "mar": 3091, + "mara": 15655, + "marais": 47913, + "maran": 44732, + "marath": 16274, + "marathi": 34102, + "marathon": 40764, + "marathon": 5910, + "marau": 38475, + "marbella": 36182, + "marble": 45429, + "marble": 13071, + "marbles": 42931, + "marc": 14054, + "marc": 9075, + "marca": 38242, + "marcel": 17726, + "marcel": 24652, + "marcelo": 35939, + "march": 10638, + "march": 2227, + "marche": 36173, + "marched": 37976, + "marches": 38249, + "marchfor": 31721, + "marching": 15082, + "marchmadness": 28555, + "marci": 36698, + "marcia": 41075, + "marck": 47733, + "marco": 24719, + "marco": 10924, + "marcor": 39945, + "marcorubio": 41143, + "marcos": 21696, + "marcu": 20760, + "marcus": 48955, + "marcus": 9895, + "mardi": 39728, + "mardi": 29229, + "mardigras": 43343, + "mare": 26512, + "mare": 8870, + "mares": 19724, + "marg": 44014, + "margar": 16838, + "margare": 10232, + "margaret": 12185, + "margarita": 25958, + "margaritas": 42679, + "margate": 37428, + "margin": 19464, + "margin": 21357, + "marginal": 38320, + "margins": 33763, + "margot": 37144, + "mari": 2603, + "mari": 19322, + "maria": 41109, + "maria": 6595, + "mariachi": 44299, + "mariah": 31214, + "mariah": 24789, + "mariahcarey": 36538, + "marian": 41129, + "marian": 24677, + "mariana": 44224, + "marianne": 32214, + "mariano": 43988, + "marie": 20657, + "marie": 7864, + "marietta": 46634, + "marig": 41002, + "marijuana": 9864, + "maril": 14611, + "marilyn": 38959, + "marilyn": 18489, + "marin": 8910, + "marin": 23992, + "marina": 12060, + "marinated": 33406, + "marine": 20674, + "marine": 5746, + "mariner": 39972, + "mariners": 19086, + "marines": 15018, + "marino": 30878, + "mario": 39176, + "mario": 7600, + "marion": 37765, + "marion": 18397, + "maris": 21512, + "maris": 33093, + "marisa": 42938, + "mariska": 44703, + "marissa": 31219, + "marist": 48223, + "mariti": 13124, + "maritime": 14331, + "marj": 38639, + "mark": 3805, + "mark": 2110, + "marke": 2399, + "marked": 12360, + "marker": 18170, + "markers": 23664, + "market": 11614, + "market": 2196, + "marketer": 33482, + "marketers": 23682, + "marketing": 19535, + "marketing": 2905, + "marketplace": 18241, + "markets": 7292, + "markham": 39817, + "marking": 14705, + "markings": 41046, + "markle": 32672, + "marko": 38338, + "marks": 5466, + "markus": 33725, + "marl": 24922, + "marlborough": 43515, + "marlene": 45117, + "marley": 16504, + "marlin": 34275, + "marlins": 23309, + "marlon": 32995, + "marmalade": 39068, + "marnock": 48305, + "maro": 27029, + "maroon": 20501, + "marqu": 20704, + "marque": 13012, + "marquee": 27725, + "marquette": 37624, + "marquez": 27317, + "marquis": 33530, + "marr": 32871, + "marrake": 37125, + "marrakech": 39006, + "marri": 3839, + "marriage": 38047, + "marriage": 7040, + "marriages": 38190, + "married": 6791, + "marries": 46283, + "marriott": 19211, + "marrow": 31030, + "marry": 13288, + "marrying": 40507, + "mars": 41469, + "mars": 7496, + "marsden": 43344, + "marse": 26577, + "marseille": 30365, + "marsh": 9237, + "marsh": 13505, + "marsha": 21491, + "marshal": 26608, + "marshall": 30939, + "marshall": 9811, + "marshals": 44175, + "marshes": 43450, + "marshmal": 21069, + "marshmallow": 28530, + "marshmallows": 39471, + "mart": 2348, + "mart": 7772, + "marta": 32858, + "martens": 43211, + "marth": 34493, + "martha": 16427, + "marti": 20577, + "martial": 17088, + "martialarts": 35895, + "martian": 30214, + "martin": 6929, + "martin": 3690, + "martina": 34393, + "martinez": 
13913, + "marting": 47570, + "martini": 22199, + "martino": 41675, + "martins": 30569, + "marty": 9926, + "marty": 17169, + "martyn": 44075, + "martyr": 36155, + "martyr": 26067, + "martyrdom": 43110, + "martyred": 39114, + "martyrs": 24707, + "maru": 37413, + "maru": 31838, + "marvel": 13835, + "marvel": 5996, + "marvelcomics": 46897, + "marvell": 26576, + "marvellous": 28402, + "marvelous": 25487, + "marvin": 19675, + "marx": 30559, + "marx": 26001, + "marxist": 45205, + "mary": 5146, + "mary": 2676, + "maryam": 33636, + "maryam": 36393, + "maryland": 11379, + "marys": 40905, + "marys": 40228, + "mas": 5226, + "mas": 1412, + "masa": 24995, + "masa": 41868, + "masala": 31483, + "masc": 23564, + "mascar": 46984, + "mascara": 31635, + "mascot": 13983, + "mascots": 43266, + "mascul": 25589, + "masculine": 48269, + "masculinity": 40465, + "mase": 49128, + "maser": 25798, + "maserati": 30442, + "mash": 12317, + "mash": 15680, + "mashable": 41026, + "mashed": 27395, + "mashup": 27079, + "masi": 35965, + "masjid": 31420, + "mask": 19262, + "mask": 8306, + "masked": 25757, + "masking": 47046, + "masks": 19055, + "maslow": 44359, + "mason": 17424, + "mason": 9699, + "masonic": 36491, + "masonry": 30764, + "masons": 37195, + "masqu": 26593, + "masquer": 29604, + "masquerade": 36944, + "mass": 4636, + "mass": 4854, + "massach": 14484, + "massachuse": 14577, + "massachusetts": 14756, + "massacre": 14696, + "massage": 13055, + "masse": 41735, + "masses": 22978, + "massey": 29868, + "massi": 17239, + "massimo": 45821, + "massive": 4818, + "massively": 34297, + "mast": 45916, + "mast": 27920, + "master": 4534, + "master": 3498, + "mastercard": 40542, + "masterchef": 34809, + "masterclass": 17529, + "mastered": 32616, + "masterful": 46823, + "mastering": 28326, + "mastermind": 34029, + "masterpiece": 12066, + "masterpieces": 37596, + "masters": 6913, + "mastery": 34800, + "mastiff": 42311, + "maswar": 47887, + "mat": 905, + "mat": 9063, + "mata": 17270, + "match": 7733, + "match": 2439, + "matcha": 32433, + "matchday": 15947, + "matched": 17792, + "matches": 8609, + "matching": 11840, + "matchup": 19355, + "matchups": 49162, + "mate": 6137, + "mate": 2936, + "mated": 33813, + "mateo": 34991, + "mater": 23724, + "materi": 7084, + "material": 7118, + "materials": 8161, + "maternal": 26131, + "maternity": 23894, + "mates": 5817, + "math": 13277, + "math": 6025, + "mathe": 8725, + "mathemat": 11901, + "mathematical": 25609, + "mathematician": 41036, + "mathematics": 20113, + "mathew": 36333, + "mathews": 37120, + "mathi": 23014, + "mathieu": 40417, + "maths": 14763, + "mati": 12716, + "mati": 32268, + "matic": 36859, + "matic": 7900, + "matically": 38282, + "matics": 23634, + "matil": 26751, + "matilda": 36308, + "matin": 44849, + "matinee": 38525, + "mating": 34346, + "mation": 11701, + "matisse": 43446, + "mato": 13127, + "matologist": 48842, + "matology": 27940, + "matory": 25519, + "matri": 27041, + "matrix": 18078, + "mats": 22259, + "matsu": 30242, + "matt": 7972, + "matt": 3972, + "mattb": 42791, + "matte": 31237, + "matte": 19771, + "mattel": 35365, + "matteo": 33120, + "matter": 30471, + "matter": 3828, + "matters": 5708, + "matth": 41846, + "matthe": 5116, + "matthew": 17588, + "matthew": 7008, + "matthews": 16739, + "matthi": 29853, + "matthias": 45104, + "matti": 39840, + "mattress": 23438, + "matty": 31233, + "matty": 29176, + "matu": 40616, + "matur": 22897, + "mature": 14417, + "maturity": 28047, + "mau": 8134, + "mau": 23033, + "maui": 20463, + "maul": 30725, + "maur": 10574, + "maure": 
25191, + "maureen": 31723, + "maurice": 20200, + "mauricio": 39066, + "mauriti": 28406, + "mauritius": 29305, + "mauro": 41691, + "mav": 25697, + "maver": 16700, + "maverick": 27425, + "mavericks": 30092, + "mavs": 30665, + "maw": 39351, + "maw": 42271, + "mawards": 37682, + "max": 4898, + "max": 3902, + "maxi": 8554, + "maxi": 23266, + "maxim": 19892, + "maxim": 38574, + "maximize": 28673, + "maximum": 13162, + "maximus": 44312, + "maxine": 38468, + "maxwell": 19611, + "maxx": 37466, + "may": 1686, + "may": 1270, + "maya": 45783, + "maya": 12987, + "mayan": 37952, + "maybe": 3746, + "mayday": 29957, + "mayer": 21196, + "mayfair": 35171, + "mayfield": 33933, + "mayhem": 21502, + "maymay": 26600, + "maymay": 33853, + "maymayentrata": 30480, + "maynard": 32487, + "mayne": 35771, + "mayo": 22449, + "mayo": 11280, + "mayor": 15429, + "mayor": 4676, + "mayoral": 28983, + "mayorof": 43533, + "mayors": 28501, + "mays": 35445, + "maythe": 42281, + "mayward": 45751, + "mayward": 23519, + "mayweather": 22774, + "maz": 9177, + "maz": 36215, + "mazda": 18506, + "maze": 21988, + "mazz": 29439, + "mañ": 37059, + "mañana": 39354, + "mb": 758, + "mb": 3996, + "mba": 8329, + "mban": 46685, + "mbar": 44452, + "mbb": 10736, + "mbc": 20137, + "mbe": 38395, + "mbe": 27004, + "mber": 5467, + "mber": 1034, + "mberg": 26372, + "mbers": 5443, + "mbi": 45347, + "mble": 20310, + "mble": 4756, + "mbles": 28693, + "mbling": 28604, + "mbo": 25733, + "mbo": 11319, + "mbps": 44896, + "mbs": 10370, + "mbta": 38979, + "mbu": 42228, + "mbuhari": 36752, + "mc": 1278, + "mc": 4126, + "mca": 40570, + "mca": 14635, + "mcal": 28663, + "mcar": 43776, + "mcbride": 35080, + "mcc": 21192, + "mccabe": 37628, + "mccaf": 47385, + "mccain": 20397, + "mccall": 34844, + "mccann": 27140, + "mccar": 9570, + "mccarthy": 16974, + "mccartney": 19958, + "mccl": 24709, + "mccla": 43672, + "mccle": 40139, + "mcclure": 44945, + "mcco": 46152, + "mccon": 32638, + "mccor": 23057, + "mccormack": 45164, + "mccormick": 39088, + "mccoy": 20218, + "mccr": 41996, + "mccre": 25393, + "mccul": 38833, + "mccull": 41782, + "mcd": 28930, + "mcder": 27355, + "mcdermott": 34504, + "mcdon": 12171, + "mcdonald": 10741, + "mcdonalds": 17674, + "mcdonnell": 34360, + "mcdowell": 34119, + "mce": 26864, + "mcel": 28752, + "mcen": 47423, + "mcfad": 36976, + "mcfadden": 42105, + "mcfar": 29020, + "mcfarlane": 47174, + "mcfc": 16416, + "mcfly": 38211, + "mcg": 42507, + "mcg": 27995, + "mcgee": 29223, + "mcgill": 46524, + "mcgill": 35511, + "mcgin": 29596, + "mcgowan": 40462, + "mcgr": 25169, + "mcgra": 29367, + "mcgrath": 28759, + "mcgraw": 40950, + "mcgregor": 19642, + "mcgu": 34294, + "mcguinness": 45299, + "mcguire": 32635, + "mci": 46212, + "mci": 45491, + "mcil": 30481, + "mcin": 18770, + "mcintosh": 45353, + "mcintyre": 33369, + "mck": 6781, + "mckay": 33611, + "mcke": 27424, + "mckee": 43529, + "mcken": 42619, + "mckenna": 24924, + "mckenzie": 25502, + "mckin": 15437, + "mckinley": 39891, + "mckinney": 33554, + "mckinnon": 48736, + "mckinsey": 48143, + "mcl": 49021, + "mcla": 12565, + "mclaren": 37381, + "mclaren": 16789, + "mclau": 32285, + "mclaughlin": 35346, + "mcle": 25299, + "mclean": 28666, + "mcleod": 40259, + "mcm": 12251, + "mcmahon": 24026, + "mcmaster": 42703, + "mcmillan": 45603, + "mcn": 42919, + "mcnam": 32682, + "mcnamara": 37506, + "mcne": 42545, + "mco": 33723, + "mcqueen": 22544, + "mcr": 29884, + "mcr": 16966, + "mcs": 27020, + "mcu": 30403, + "md": 8637, + "md": 4732, + "mdc": 38773, + "mdc": 41761, + "mds": 48746, + "mdt": 40822, + "me": 613, 
+ "me": 614, + "mea": 46045, + "mea": 17711, + "mead": 12134, + "mead": 21567, + "meade": 37218, + "meado": 16402, + "meadow": 25213, + "meadow": 17195, + "meadows": 17178, + "meal": 29662, + "meal": 5478, + "meals": 11229, + "mean": 4189, + "mean": 3450, + "meand": 48015, + "meaning": 14586, + "meaning": 8342, + "meaningful": 17480, + "meaningless": 48932, + "meanings": 45814, + "means": 3494, + "meant": 8674, + "meantime": 27499, + "meanwhile": 9650, + "meas": 5867, + "measles": 38230, + "measurable": 48010, + "measure": 15261, + "measure": 10579, + "measured": 23154, + "measurement": 20973, + "measurements": 29894, + "measures": 11936, + "measuring": 18064, + "meat": 10805, + "meat": 6480, + "meatball": 43642, + "meatballs": 29233, + "meath": 37920, + "meatless": 48085, + "meats": 29558, + "mec": 27432, + "mecca": 36095, + "mech": 38305, + "mechan": 6715, + "mechanic": 24582, + "mechanical": 14467, + "mechanics": 20536, + "mechanism": 22576, + "mechanisms": 28610, + "meck": 41908, + "med": 1948, + "med": 2177, + "meda": 33614, + "medal": 29714, + "medal": 6974, + "medalist": 21040, + "medalists": 43397, + "medalli": 31349, + "medallion": 43469, + "medallist": 41472, + "medals": 14710, + "mede": 48225, + "meded": 27627, + "medi": 1436, + "media": 22064, + "media": 1895, + "mediac": 37490, + "median": 30491, + "mediation": 42829, + "medic": 3602, + "medic": 35441, + "medicaid": 25421, + "medical": 18432, + "medical": 4116, + "medicare": 23710, + "medication": 23771, + "medications": 37181, + "medicinal": 28772, + "medicine": 5616, + "medicines": 26541, + "medics": 46688, + "medieval": 38956, + "medieval": 10789, + "medina": 27281, + "mediocre": 41170, + "medit": 19130, + "meditate": 38039, + "meditation": 10827, + "mediter": 14194, + "mediterran": 14358, + "mediterranean": 15327, + "medium": 8675, + "medley": 24793, + "meds": 25075, + "medtech": 42044, + "medusa": 44216, + "medway": 42286, + "mee": 1725, + "mee": 14075, + "meek": 28935, + "meen": 37940, + "meen": 46515, + "meer": 26714, + "meer": 27555, + "meet": 5714, + "meet": 1633, + "meeting": 48566, + "meeting": 2071, + "meetings": 9980, + "meets": 5972, + "meetthe": 27575, + "meetup": 15430, + "meg": 11500, + "meg": 16186, + "mega": 15979, + "mega": 9068, + "megab": 38103, + "megadeth": 46741, + "megal": 37650, + "megam": 26073, + "megan": 19127, + "megan": 11503, + "megap": 33624, + "megat": 35581, + "megh": 31192, + "meghan": 39939, + "meghan": 18261, + "meh": 10512, + "meh": 22211, + "mehta": 25031, + "mei": 22564, + "mei": 25198, + "meier": 29812, + "mein": 28857, + "mein": 21466, + "meister": 28407, + "mek": 44645, + "mel": 1902, + "mel": 6834, + "mela": 35032, + "melan": 22261, + "melanch": 44818, + "melancholy": 47821, + "melani": 34031, + "melania": 32796, + "melanie": 22153, + "melanoma": 40862, + "melb": 47007, + "melb": 28980, + "melbourne": 28387, + "melbourne": 6995, + "melee": 45108, + "meli": 28885, + "melinda": 46303, + "melis": 18913, + "melissa": 41866, + "melissa": 13030, + "mell": 22531, + "mell": 41583, + "mello": 47594, + "mellon": 45162, + "mellow": 32034, + "melo": 10354, + "melo": 22374, + "melodic": 41877, + "melodies": 38412, + "melody": 19119, + "melon": 12146, + "melrose": 36296, + "melt": 22209, + "melt": 15957, + "meltdown": 30613, + "melted": 23037, + "melting": 19247, + "melton": 46062, + "melts": 31446, + "melville": 46030, + "melvin": 31544, + "mely": 6373, + "mem": 4937, + "mem": 34944, + "memb": 2114, + "member": 29566, + "member": 1640, + "members": 2567, + "membership": 11562, + "membrane": 
34088, + "meme": 35157, + "meme": 9169, + "memes": 12828, + "memo": 15967, + "memo": 19334, + "memoir": 20532, + "memoirs": 45311, + "memor": 1858, + "memorab": 26271, + "memorabilia": 27488, + "memorable": 13172, + "memorial": 16285, + "memorial": 4642, + "memorialday": 21598, + "memoriam": 48191, + "memories": 4304, + "memory": 44766, + "memory": 5137, + "memph": 10285, + "memphis": 38432, + "memphis": 11298, + "men": 1552, + "men": 1656, + "mena": 23052, + "menace": 29949, + "mend": 8151, + "mend": 46927, + "mendel": 49268, + "mendes": 18060, + "mendez": 48275, + "mendo": 19327, + "mendoza": 23680, + "meng": 37102, + "meng": 37450, + "mening": 46428, + "menon": 38255, + "menopau": 34974, + "menopause": 46026, + "mens": 16924, + "mens": 10495, + "mensfashion": 27578, + "menstru": 28345, + "menstrual": 40915, + "menswear": 18803, + "ment": 1585, + "ment": 777, + "mental": 8611, + "mental": 3448, + "mentalhealth": 20593, + "mentalhealth": 13022, + "mentality": 26647, + "mentally": 14307, + "mentary": 4468, + "mentation": 9512, + "mentday": 40397, + "mente": 40302, + "mente": 36396, + "mented": 9249, + "menting": 14471, + "mention": 43881, + "mention": 6762, + "mentioned": 11948, + "mentioning": 34290, + "mentions": 12334, + "mento": 30582, + "mentor": 45342, + "mentor": 11642, + "mentoring": 19610, + "mentors": 20945, + "mentorship": 33878, + "ments": 1827, + "menu": 6225, + "menus": 33534, + "meo": 30792, + "meow": 39965, + "meow": 17246, + "mep": 27095, + "mer": 1316, + "mer": 2452, + "mera": 20028, + "merc": 34357, + "merc": 44399, + "mercado": 45479, + "merce": 8409, + "mercede": 34959, + "mercedes": 26403, + "mercedes": 10685, + "mercedesam": 40107, + "mercedesbenz": 32347, + "mercen": 40301, + "mercer": 21632, + "merch": 11504, + "merchandi": 14954, + "merchandise": 16808, + "merchandising": 49196, + "merchant": 19563, + "merchants": 34427, + "merci": 23364, + "merci": 29378, + "mercur": 11471, + "mercury": 45203, + "mercury": 12653, + "mercy": 33249, + "mercy": 10815, + "mere": 29657, + "mere": 10342, + "mered": 24657, + "mered": 32297, + "meredith": 25103, + "merely": 28718, + "merge": 30406, + "merged": 46492, + "merger": 24744, + "merging": 49256, + "meri": 17993, + "meri": 36109, + "meria": 48433, + "meric": 27097, + "merica": 30561, + "meridi": 37901, + "meridian": 31195, + "mering": 41060, + "meringue": 41661, + "merino": 42648, + "merit": 20830, + "merkel": 24715, + "merle": 48586, + "merlin": 26517, + "merlot": 40424, + "mermaid": 16064, + "mermaids": 43617, + "mero": 19097, + "merr": 48288, + "merri": 21462, + "merrill": 47713, + "merritt": 36462, + "merry": 14167, + "merry": 5779, + "merrychristmas": 19672, + "mers": 4199, + "mersal": 36711, + "mersey": 25248, + "mersey": 46239, + "merseyside": 35382, + "mert": 48496, + "merton": 35315, + "mery": 40873, + "meryl": 35787, + "mes": 28432, + "mes": 3029, + "mesa": 18956, + "mese": 42018, + "mesh": 15030, + "mesm": 18695, + "mesmer": 38435, + "mesmeri": 25985, + "mesmerizing": 35637, + "meso": 25537, + "mesqu": 46819, + "mess": 2490, + "mess": 8188, + "message": 3918, + "messages": 9390, + "messaging": 23234, + "messe": 40391, + "messed": 23580, + "messenger": 17389, + "messi": 19394, + "messi": 11252, + "messiah": 28737, + "messing": 23144, + "messy": 15987, + "mest": 23780, + "mester": 47349, + "mesut": 49177, + "met": 5249, + "met": 2340, + "meta": 14803, + "meta": 22701, + "metab": 16150, + "metabol": 48389, + "metaboli": 25573, + "metabolic": 34311, + "metabolism": 27824, + "metal": 8935, + "metal": 4044, + "metall": 
19084, + "metallic": 17257, + "metallica": 24079, + "metals": 21375, + "metam": 28862, + "metamor": 39030, + "metamorpho": 47601, + "metaph": 24189, + "metaphor": 34233, + "metast": 41973, + "mete": 11226, + "meteor": 26429, + "meteor": 26823, + "meteoro": 25948, + "meteorologist": 42849, + "meter": 10104, + "meters": 13247, + "metgala": 30089, + "meth": 21867, + "meth": 26177, + "methane": 37565, + "metho": 5770, + "method": 10284, + "methodist": 25165, + "methodo": 28488, + "methodology": 37316, + "methods": 12200, + "methyl": 48999, + "metmuseum": 28207, + "meto": 25679, + "metoo": 24722, + "metr": 15086, + "metre": 27889, + "metres": 19798, + "metric": 19950, + "metrical": 40704, + "metrics": 24396, + "metro": 7257, + "metro": 6784, + "metroid": 39957, + "metropolis": 40476, + "metropolitan": 19013, + "metry": 20039, + "mets": 9633, + "mett": 28081, + "metz": 40506, + "meu": 34520, + "mew": 40368, + "mex": 3213, + "mex": 18387, + "mexic": 31728, + "mexican": 37442, + "mexican": 8186, + "mexicans": 47729, + "mexico": 31834, + "mexico": 4604, + "mey": 28584, + "mey": 27777, + "meyer": 13963, + "meyers": 32326, + "mez": 30615, + "mez": 46833, + "mezz": 38771, + "mf": 18199, + "mf": 11067, + "mfa": 24107, + "mfc": 39474, + "mfg": 21912, + "mfw": 27309, + "mg": 10003, + "mg": 8014, + "mga": 23954, + "mgm": 27572, + "mgmt": 22288, + "mgr": 31500, + "mgs": 48073, + "mgt": 48663, + "mh": 9962, + "mh": 10834, + "mha": 41944, + "mhealth": 41225, + "mhs": 28815, + "mhz": 31550, + "mi": 714, + "mi": 2251, + "mia": 5852, + "miam": 31053, + "miami": 15106, + "miami": 4891, + "mian": 24792, + "miaw": 36046, + "mib": 48178, + "mic": 1213, + "mic": 3816, + "mica": 41551, + "micah": 33870, + "mice": 19030, + "mich": 25628, + "mich": 23029, + "micha": 2083, + "michael": 6051, + "michael": 2511, + "michaela": 41897, + "michaeljackson": 33532, + "michaels": 23868, + "michal": 47144, + "miche": 37966, + "micheal": 43709, + "michel": 5158, + "michel": 17153, + "michelangelo": 41245, + "michele": 20642, + "michelin": 26330, + "michelle": 19028, + "michelle": 8625, + "michi": 5658, + "michigan": 32344, + "michigan": 6296, + "mick": 15171, + "mick": 12592, + "mickey": 41813, + "mickey": 13053, + "micky": 43011, + "micro": 3160, + "micro": 11374, + "microbes": 44671, + "microbi": 19496, + "microbial": 30335, + "microbiology": 35348, + "microbiome": 35148, + "micron": 48742, + "microphone": 24643, + "micropoetry": 35997, + "microscope": 29114, + "microscopy": 38431, + "microsof": 42424, + "microsoft": 38650, + "microsoft": 7254, + "microwave": 24240, + "mics": 16554, + "mid": 2192, + "mid": 4734, + "midcentury": 48988, + "midd": 2983, + "midday": 23390, + "middle": 9849, + "middle": 3694, + "middleeast": 32783, + "middles": 29769, + "middlesbrough": 32436, + "middlesex": 39154, + "middleton": 23627, + "middleweight": 35829, + "midfield": 28116, + "midfielder": 13423, + "midget": 30734, + "midi": 39496, + "midi": 27326, + "midland": 24822, + "midlands": 18062, + "midnight": 35746, + "midnight": 6302, + "mids": 40821, + "midst": 24752, + "midsummer": 35234, + "midterm": 34365, + "midterms": 32015, + "midtown": 26069, + "midway": 26536, + "midweek": 29120, + "midwest": 16627, + "midwi": 44802, + "midwife": 37681, + "midwives": 42355, + "mie": 20865, + "mie": 10555, + "miento": 46482, + "mier": 36490, + "mies": 8840, + "miff": 49398, + "mig": 28743, + "might": 2727, + "mighty": 26632, + "mighty": 7815, + "mign": 41678, + "migos": 44640, + "migr": 3736, + "migra": 28186, + "migraine": 35360, + "migrant": 18902, + 
"migrants": 15814, + "migrate": 41804, + "migrating": 43604, + "migration": 11891, + "migu": 12279, + "miguel": 33672, + "miguel": 14436, + "miho": 46870, + "mii": 39896, + "mik": 15096, + "mik": 46203, + "mika": 28609, + "mika": 25185, + "mike": 5884, + "mike": 3178, + "mikel": 48865, + "mikequind": 33508, + "mikequindazzi": 33551, + "mikey": 34934, + "mikey": 23368, + "mikha": 30999, + "mikhail": 38327, + "miki": 48863, + "miko": 35413, + "miku": 37703, + "mil": 1469, + "mil": 12826, + "mila": 26183, + "milan": 30380, + "milan": 8552, + "milano": 18585, + "milb": 42248, + "mild": 16085, + "mildly": 49059, + "mile": 7833, + "mile": 6243, + "mileage": 30579, + "miler": 44680, + "miles": 3446, + "milestone": 13485, + "milestones": 34025, + "miley": 25336, + "miley": 14321, + "mileycyrus": 28528, + "milf": 45386, + "milford": 35840, + "mili": 16698, + "miliband": 41440, + "milit": 3715, + "militant": 33629, + "militants": 23974, + "military": 24498, + "military": 4323, + "militi": 46625, + "militia": 32114, + "milk": 13409, + "milk": 5205, + "milkshake": 29066, + "milky": 37320, + "milky": 21120, + "milkyway": 43246, + "mill": 4221, + "mill": 6637, + "milla": 49381, + "millan": 34930, + "millan": 22188, + "millar": 41851, + "mille": 34066, + "millen": 48501, + "millenni": 10406, + "millennial": 28357, + "millennials": 18804, + "millennium": 21116, + "miller": 21699, + "miller": 5733, + "milli": 5340, + "millie": 29283, + "milling": 39133, + "million": 13154, + "million": 2506, + "millionaire": 25179, + "millionaires": 47159, + "millions": 8492, + "mills": 10331, + "millwall": 35902, + "milly": 45794, + "milne": 44590, + "milner": 45230, + "milo": 24548, + "milton": 39004, + "milton": 17360, + "milwau": 13452, + "milwaukee": 14259, + "mim": 39379, + "mimi": 27086, + "mimic": 47116, + "mimic": 46519, + "mimo": 45551, + "min": 771, + "min": 3331, + "mina": 15281, + "minaj": 25136, + "minal": 40222, + "minat": 33275, + "mince": 32396, + "mind": 5890, + "mind": 2575, + "mindanao": 44228, + "minded": 21330, + "mindful": 28457, + "mindfulness": 15707, + "minding": 45337, + "minds": 9244, + "mindset": 14217, + "mindy": 46875, + "mindy": 38551, + "mine": 20149, + "mine": 3347, + "minecraft": 15678, + "mined": 48034, + "minent": 12533, + "miner": 14109, + "miner": 26572, + "mineral": 17692, + "minerals": 21169, + "miners": 22119, + "mines": 16211, + "ming": 10868, + "ming": 2107, + "mingham": 7590, + "mingle": 38437, + "mingly": 36909, + "mington": 49283, + "mington": 23119, + "minh": 48734, + "minho": 21318, + "mini": 1810, + "mini": 3954, + "miniature": 44298, + "miniature": 16377, + "miniatures": 38816, + "minic": 31522, + "minim": 10005, + "minimal": 18458, + "minimalism": 42594, + "minimalist": 26641, + "minimize": 38697, + "minimum": 12244, + "minindia": 28458, + "mining": 8473, + "minion": 28622, + "minions": 27035, + "minis": 33409, + "minis": 35976, + "minister": 25688, + "minister": 3569, + "ministerial": 33008, + "ministers": 16406, + "ministries": 27895, + "ministry": 8742, + "mink": 42017, + "minn": 45991, + "minn": 47318, + "minne": 7083, + "minneapolis": 16977, + "minneso": 9380, + "minnesota": 9968, + "minnie": 24493, + "mino": 22791, + "minogue": 44202, + "minor": 8522, + "minorities": 28119, + "minority": 16210, + "minors": 36789, + "mins": 6196, + "minsk": 46151, + "minster": 11189, + "mint": 48084, + "mint": 7506, + "minted": 49377, + "minton": 20050, + "minu": 29064, + "minus": 15358, + "minute": 28931, + "minute": 4497, + "minutes": 3056, + "mio": 26366, + "mir": 2750, + "mir": 
6585, + "mira": 21665, + "mira": 22762, + "mirac": 13685, + "miracle": 49208, + "miracle": 11543, + "miracles": 23478, + "miraculous": 38671, + "mirage": 28679, + "mirai": 49060, + "mirand": 32367, + "miranda": 17590, + "mire": 38140, + "mire": 30140, + "miri": 22273, + "miriam": 30950, + "miro": 34851, + "miro": 48317, + "mirren": 47600, + "mirro": 48500, + "mirror": 29823, + "mirror": 7220, + "mirrors": 21823, + "mirza": 36440, + "mis": 866, + "mis": 11239, + "mischief": 33896, + "misconceptions": 48681, + "misconduct": 30601, + "mise": 46567, + "mise": 17267, + "miser": 33394, + "miserable": 26196, + "misery": 28360, + "mises": 24390, + "misfits": 42708, + "mish": 15494, + "mish": 20981, + "misha": 35434, + "mishra": 33042, + "misleading": 30862, + "mism": 15948, + "miso": 27657, + "miso": 33441, + "misogy": 31315, + "misogyny": 48415, + "miss": 6984, + "miss": 1526, + "missal": 38337, + "missed": 3955, + "misses": 15844, + "missi": 3008, + "missile": 14411, + "missiles": 27868, + "missin": 36209, + "missing": 23509, + "missing": 3423, + "mission": 12738, + "mission": 2406, + "missionaries": 40580, + "missionary": 27915, + "missions": 6990, + "mississ": 26483, + "mississauga": 28393, + "mississi": 11687, + "mississippi": 12232, + "missou": 30710, + "missoula": 48549, + "missouri": 11835, + "missuni": 26347, + "missuniverse": 28766, + "missy": 48105, + "missy": 31515, + "missyou": 45799, + "mist": 12610, + "mist": 11946, + "mistak": 20478, + "mistake": 11303, + "mistaken": 29182, + "mistakenly": 48494, + "mistakes": 12824, + "mister": 26949, + "mister": 18895, + "mistle": 46800, + "mistletoe": 48569, + "mistre": 42039, + "mistress": 24349, + "mists": 28636, + "misty": 18799, + "misunderstood": 41574, + "misuse": 40970, + "mit": 3303, + "mit": 4551, + "mita": 47514, + "mitage": 27964, + "mitch": 6969, + "mitch": 14150, + "mitchell": 39339, + "mitchell": 9007, + "mite": 26929, + "mith": 21752, + "mith": 17948, + "miti": 17857, + "mitigate": 42273, + "mitigation": 35514, + "mito": 38254, + "mitochondri": 42132, + "mitra": 47703, + "mits": 24086, + "mitsu": 17905, + "mitsubi": 21604, + "mitsubishi": 23030, + "mitt": 17321, + "mitt": 21341, + "mitted": 10307, + "mitting": 27938, + "mitz": 41827, + "mium": 35891, + "miwx": 43941, + "mix": 3210, + "mix": 3285, + "mixed": 29376, + "mixed": 6780, + "mixer": 17200, + "mixers": 39175, + "mixes": 19061, + "mixing": 15588, + "mixtape": 11044, + "mixture": 28286, + "miy": 25695, + "miya": 36257, + "miz": 20881, + "miz": 30795, + "mize": 19076, + "mized": 43418, + "mizing": 38715, + "mizz": 19985, + "mizzou": 26165, + "mj": 13117, + "mj": 14733, + "mk": 11581, + "mk": 8937, + "mke": 36642, + "mkt": 24814, + "ml": 3627, + "ml": 5780, + "mla": 16723, + "mlas": 48464, + "mlb": 21039, + "mlb": 7482, + "mley": 40329, + "mlg": 45801, + "mlin": 24556, + "mlk": 17941, + "mlkday": 39905, + "mlm": 37611, + "mln": 18971, + "mlp": 23620, + "mlpfi": 45475, + "mlpfim": 45640, + "mls": 13077, + "mm": 1028, + "mm": 2848, + "mma": 34140, + "mma": 6096, + "mmc": 44253, + "mme": 13105, + "mmed": 19570, + "mmer": 35717, + "mmer": 7508, + "mmers": 28128, + "mmes": 42862, + "mmi": 34147, + "mming": 21038, + "mming": 16507, + "mmings": 31357, + "mmit": 41050, + "mmj": 43015, + "mmm": 37908, + "mmm": 7641, + "mmmm": 36312, + "mmmm": 13180, + "mmmmm": 21808, + "mmmmmm": 43740, + "mmo": 30418, + "mmon": 41131, + "mmor": 36657, + "mmorpg": 39476, + "mms": 37803, + "mmva": 42666, + "mmy": 28837, + "mmy": 8722, + "mn": 5086, + "mn": 4057, + "mna": 34877, + "mnd": 44776, + "mnet": 
34129, + "mnf": 41105, + "mnl": 32980, + "mnleg": 42653, + "mns": 39040, + "mnt": 21477, + "mntwins": 45448, + "mnwild": 39044, + "mnwx": 39592, + "mo": 617, + "mo": 2080, + "moa": 33174, + "moana": 43241, + "mob": 2818, + "mob": 12754, + "mobi": 9451, + "mobil": 26343, + "mobil": 29815, + "mobile": 12935, + "mobile": 3451, + "mobiles": 44302, + "mobili": 20770, + "mobility": 12546, + "mobilization": 48916, + "moby": 47219, + "moc": 41439, + "moc": 36992, + "mocha": 28425, + "mochi": 47973, + "mock": 15641, + "mock": 12759, + "mocked": 47400, + "mocking": 28692, + "mocking": 37870, + "mocks": 35142, + "mod": 6362, + "mod": 10893, + "moda": 25814, + "modal": 33157, + "mode": 20402, + "mode": 6493, + "model": 4591, + "model": 2863, + "modeled": 39527, + "modeling": 13706, + "modelling": 19946, + "models": 6176, + "moder": 2894, + "moderate": 16435, + "moderated": 27928, + "moderating": 34242, + "moderator": 32659, + "modern": 11706, + "modern": 4077, + "modernart": 34417, + "moderni": 24328, + "modernism": 39601, + "modernist": 36773, + "modernization": 47294, + "modes": 30454, + "modest": 25436, + "modi": 9047, + "modi": 7774, + "modification": 37630, + "modified": 17964, + "modo": 36820, + "mods": 23843, + "modu": 9036, + "modular": 22437, + "module": 16757, + "modules": 30575, + "moe": 38655, + "moe": 17938, + "mof": 30798, + "moff": 27160, + "mog": 42362, + "moga": 41732, + "mogadishu": 45133, + "mogul": 41320, + "moh": 18979, + "moh": 35388, + "moha": 46892, + "moham": 7923, + "mohamed": 18472, + "mohammad": 19926, + "mohammed": 16168, + "mohan": 26521, + "mohan": 23586, + "mohawk": 34942, + "mohd": 49094, + "mohsin": 48861, + "moi": 20691, + "moi": 21825, + "moil": 30349, + "moines": 32091, + "moist": 19831, + "moist": 33263, + "moisture": 20412, + "moisturi": 25942, + "moj": 34505, + "moja": 49055, + "mojito": 46830, + "mojo": 25204, + "mok": 49146, + "mol": 4246, + "mol": 31582, + "mold": 21846, + "molding": 46274, + "moldova": 47317, + "mole": 9927, + "mole": 23529, + "molecular": 19370, + "molecule": 39233, + "molecules": 35643, + "molina": 34201, + "mollie": 48203, + "molly": 24368, + "molly": 12573, + "molo": 41510, + "mology": 32255, + "molten": 46071, + "moly": 47083, + "mom": 1614, + "mom": 2543, + "moma": 33605, + "mombasa": 40340, + "moment": 12197, + "moment": 2495, + "momento": 30078, + "moments": 5251, + "momentum": 15722, + "momlife": 43825, + "momma": 14508, + "mommy": 12456, + "momo": 48490, + "momo": 25980, + "moms": 28446, + "moms": 10042, + "momsdemand": 33744, + "mon": 749, + "mon": 2173, + "mona": 19143, + "monaco": 14938, + "monaghan": 39797, + "monarch": 27235, + "monarch": 22619, + "monarchs": 36750, + "monarchy": 47503, + "monaster": 19422, + "monastery": 21850, + "monc": 34847, + "moncton": 44962, + "mond": 14522, + "mond": 4475, + "monday": 6205, + "monday": 2098, + "mondaymorning": 40089, + "mondaymotiv": 45488, + "mondaymotivation": 8198, + "mondaymotivaton": 47034, + "mondays": 13815, + "monde": 29339, + "mondo": 36207, + "monds": 20317, + "mone": 25990, + "monet": 24499, + "monetary": 26394, + "moneti": 38056, + "money": 12743, + "money": 2327, + "mong": 43566, + "monger": 38928, + "mongers": 27670, + "mongo": 20680, + "mongolia": 27144, + "mongolian": 46335, + "moni": 46851, + "monia": 31161, + "monic": 30893, + "monica": 13540, + "monit": 9014, + "monitor": 10198, + "monitored": 45828, + "monitoring": 11030, + "monitors": 30478, + "monk": 30557, + "monk": 16424, + "monkey": 29597, + "monkey": 9465, + "monkeys": 15781, + "monks": 29090, + "monmouth": 
36929, + "mono": 8220, + "mono": 22537, + "monochrome": 25576, + "monogram": 39665, + "monologue": 47776, + "monopoly": 25241, + "monoxide": 49314, + "monro": 45750, + "monroe": 13625, + "mons": 19885, + "monsanto": 37592, + "monsi": 46677, + "monsieur": 48879, + "monsoon": 18872, + "monsta": 30718, + "monstax": 45631, + "monste": 47045, + "monster": 14454, + "monster": 6060, + "monsters": 11546, + "mont": 5186, + "mont": 5382, + "montag": 37202, + "montage": 32325, + "montal": 42126, + "montan": 28405, + "montana": 11436, + "monte": 8711, + "monte": 14667, + "montene": 28538, + "montenegro": 30378, + "monter": 36673, + "monterey": 23388, + "monterrey": 45254, + "montess": 43205, + "montessori": 45443, + "montgom": 13852, + "montgomery": 14951, + "month": 7680, + "month": 1924, + "monthly": 8764, + "months": 3109, + "monthsary": 42420, + "monton": 41961, + "montp": 39523, + "montre": 8434, + "montreal": 9262, + "montrose": 42347, + "monty": 43997, + "monty": 24038, + "monu": 9748, + "monument": 12019, + "monumental": 31297, + "monuments": 26916, + "mony": 4117, + "monza": 40380, + "moo": 4953, + "moo": 24626, + "mood": 42358, + "mood": 5394, + "moods": 43727, + "moody": 17170, + "moom": 36887, + "moon": 6334, + "moon": 3293, + "mooney": 37942, + "moonlight": 20001, + "moons": 29887, + "moonshine": 46706, + "moor": 14817, + "moor": 11877, + "moore": 28613, + "moore": 6708, + "moors": 32577, + "moose": 37562, + "moose": 17338, + "moot": 46895, + "mop": 33900, + "mopar": 41166, + "mor": 657, + "mor": 18614, + "mora": 29262, + "moral": 11246, + "morale": 39404, + "morales": 27117, + "morality": 34133, + "morally": 42519, + "morals": 46223, + "moran": 21557, + "moray": 44569, + "more": 5434, + "more": 750, + "morecam": 37305, + "morecambe": 43414, + "mored": 20195, + "moreland": 44135, + "moreno": 24826, + "morethan": 30889, + "morg": 34284, + "morgan": 15432, + "morgan": 6075, + "morgen": 35106, + "mori": 25710, + "mori": 29514, + "moris": 43131, + "moritz": 45594, + "morley": 40439, + "mormon": 27715, + "morn": 22393, + "mornin": 28327, + "morning": 10769, + "morning": 1119, + "mornings": 12106, + "moro": 31613, + "moroc": 11996, + "moroccan": 27546, + "morocco": 15228, + "moron": 31875, + "morons": 46477, + "morow": 40779, + "morph": 23915, + "morph": 41700, + "morphe": 38978, + "morpho": 38622, + "morrha": 43044, + "morri": 9876, + "morris": 22560, + "morris": 9090, + "morrison": 40961, + "morrison": 14094, + "morrisons": 40965, + "morrissey": 30040, + "morro": 48363, + "morrow": 21611, + "mors": 13064, + "morse": 25282, + "mort": 24257, + "mort": 30583, + "mortal": 31883, + "mortal": 14680, + "mortality": 20347, + "mortar": 27258, + "mortg": 12069, + "mortgage": 13988, + "mortgages": 45391, + "mortimer": 47836, + "morton": 20698, + "morty": 37391, + "mory": 22633, + "mos": 28658, + "mos": 9593, + "mosa": 14164, + "mosa": 23809, + "mosaic": 17506, + "mosch": 47003, + "mosco": 9840, + "moscow": 10371, + "moseley": 47080, + "moses": 18451, + "mosley": 46228, + "mosqu": 15215, + "mosque": 12694, + "mosques": 41214, + "mosquit": 39699, + "mosquito": 25083, + "mosquitoes": 41870, + "moss": 25107, + "moss": 12815, + "most": 7034, + "most": 1096, + "mostly": 8829, + "mosul": 29165, + "mot": 16352, + "mot": 15452, + "mota": 42499, + "motd": 46232, + "motel": 26191, + "moth": 33208, + "moth": 11736, + "mother": 7455, + "mother": 3050, + "motherhood": 32274, + "motherland": 46774, + "mothers": 10546, + "mothersday": 15583, + "motherwell": 48104, + "moths": 29086, + "moti": 38210, + "motif": 35373, + 
"motion": 32139, + "motion": 7860, + "motiv": 3183, + "motivate": 26771, + "motivated": 16521, + "motivates": 44684, + "motivating": 37720, + "motivation": 26117, + "motivation": 4193, + "motivational": 32832, + "motivational": 20472, + "motivationmonday": 28703, + "motive": 36669, + "motley": 42553, + "motm": 41192, + "moto": 10646, + "moto": 11431, + "motocross": 34562, + "motogp": 16615, + "motor": 3975, + "motor": 7659, + "motorbike": 33341, + "motorcycle": 10297, + "motorcycles": 24869, + "motoring": 44491, + "motorists": 32766, + "motorola": 33738, + "motors": 14989, + "motorsport": 18371, + "motorsports": 24264, + "motorway": 31808, + "motown": 32685, + "mott": 44570, + "mott": 21708, + "motto": 23338, + "mou": 2809, + "mou": 25289, + "moud": 37698, + "moul": 25725, + "mould": 36743, + "moulin": 47656, + "moun": 2023, + "mound": 21414, + "mount": 20553, + "mount": 5532, + "mountain": 14547, + "mountain": 3965, + "mountaine": 24841, + "mountaineer": 49255, + "mountains": 5873, + "mounted": 17897, + "mounting": 29910, + "mounts": 36767, + "mour": 9053, + "mour": 42446, + "moured": 29555, + "mourinho": 18536, + "mourn": 33592, + "mourning": 24169, + "mourns": 42811, + "mous": 24837, + "mous": 17425, + "mouse": 33032, + "mouse": 9301, + "mousse": 31869, + "moustache": 32795, + "mouth": 15152, + "mouth": 4932, + "mouths": 38518, + "mov": 23950, + "move": 16624, + "move": 2783, + "moved": 6997, + "movember": 23474, + "movement": 5208, + "movements": 19665, + "mover": 37673, + "movers": 33957, + "moves": 6880, + "movi": 1707, + "movic": 43838, + "movie": 11247, + "movie": 2016, + "movies": 4772, + "moving": 32160, + "moving": 3584, + "mow": 31006, + "mow": 36329, + "mower": 30895, + "mowing": 46424, + "mowx": 44263, + "moy": 27276, + "moy": 34205, + "moyes": 37119, + "moz": 14761, + "moz": 43738, + "mozam": 26648, + "mozambique": 28831, + "mozart": 22132, + "mozz": 26317, + "mozzarella": 27845, + "mp": 1037, + "mp": 1246, + "mpa": 30749, + "mpc": 38560, + "mpd": 33814, + "mped": 28134, + "mper": 22803, + "mpg": 39830, + "mpg": 37454, + "mpgvip": 42149, + "mph": 5306, + "mpi": 43263, + "mping": 27999, + "mple": 21139, + "mplo": 47071, + "mpls": 34298, + "mpo": 33674, + "mpp": 39570, + "mps": 5504, + "mption": 9717, + "mpton": 27448, + "mpu": 47156, + "mpus": 25864, + "mpy": 17192, + "mq": 19103, + "mqm": 24687, + "mr": 3139, + "mr": 1982, + "mra": 44568, + "mrc": 25897, + "mri": 24773, + "mrs": 25003, + "mrs": 4255, + "mrt": 30256, + "mru": 22370, + "mrw": 15303, + "ms": 3525, + "ms": 988, + "msa": 36306, + "msc": 31826, + "msc": 20529, + "msd": 25804, + "msd": 36407, + "msdhoni": 32850, + "msf": 36239, + "msg": 44430, + "msg": 10928, + "msh": 41751, + "msi": 43597, + "msi": 45278, + "msk": 38501, + "msl": 42736, + "msm": 22210, + "msn": 18824, + "msn": 41042, + "msnbc": 20245, + "mson": 27773, + "mson": 12298, + "msp": 41445, + "msp": 22318, + "mss": 42136, + "mss": 48610, + "mst": 26335, + "msu": 26763, + "msu": 17298, + "mswx": 42957, + "msy": 43919, + "mt": 4252, + "mt": 3284, + "mta": 28691, + "mtb": 48306, + "mtb": 18747, + "mtc": 42482, + "mtg": 49142, + "mtg": 13648, + "mth": 48151, + "mtl": 22135, + "mtn": 26041, + "mtn": 18953, + "mtr": 46650, + "mts": 38751, + "mtv": 8099, + "mtv": 12555, + "mtvbr": 47258, + "mtvhottest": 16751, + "mtvstars": 19948, + "mu": 670, + "mu": 6411, + "mua": 21395, + "muay": 44910, + "muaythai": 47763, + "mubarak": 17957, + "muc": 49115, + "much": 14300, + "much": 1238, + "mucha": 42191, + "muchas": 26278, + "mucho": 19864, + "muck": 44731, + "muck": 
45330, + "mud": 17491, + "mud": 11673, + "mudder": 49104, + "muddy": 21524, + "mue": 44383, + "mue": 40717, + "mueller": 46863, + "mueller": 14719, + "muen": 48646, + "muer": 33840, + "muf": 33852, + "mufc": 9013, + "muffin": 22696, + "muffins": 25922, + "mufti": 44930, + "mug": 16339, + "mug": 9722, + "mugabe": 36441, + "mughal": 37508, + "mugs": 22852, + "mugshot": 40028, + "muh": 36335, + "muh": 46475, + "muham": 10043, + "muhammad": 12259, + "muir": 44650, + "muir": 24745, + "muj": 44635, + "muk": 17327, + "muk": 32600, + "mukher": 34575, + "mukherjee": 37862, + "mul": 1899, + "mul": 43193, + "mula": 40937, + "mulator": 17463, + "mulberry": 39221, + "mule": 28695, + "mull": 17313, + "mull": 35310, + "mulled": 44641, + "mullen": 30797, + "muller": 33956, + "mullet": 35010, + "mulligan": 44336, + "mullins": 41265, + "mult": 34219, + "multi": 3947, + "multi": 6400, + "multic": 21683, + "multicul": 28004, + "multicultural": 34667, + "multil": 27975, + "multimedia": 27977, + "multin": 38996, + "multinational": 46540, + "multip": 40314, + "multiplayer": 27460, + "multiple": 6470, + "multipurpose": 47665, + "multit": 27814, + "multitasking": 48684, + "mulus": 26180, + "mum": 15565, + "mum": 4030, + "mumb": 5850, + "mumbai": 24279, + "mumbai": 6971, + "mumford": 46184, + "mummy": 16301, + "mums": 17868, + "mun": 2617, + "mun": 21059, + "muna": 48424, + "munch": 23587, + "munch": 33299, + "munchies": 44324, + "munchkin": 41305, + "mund": 14244, + "mundo": 20990, + "muni": 27327, + "muni": 39795, + "munich": 13526, + "munici": 12159, + "municipal": 43667, + "municipal": 16600, + "municipality": 29987, + "munition": 32668, + "munro": 36501, + "munster": 27201, + "mup": 21966, + "muppet": 40598, + "muppets": 40187, + "mups": 42195, + "mur": 2144, + "mur": 18293, + "mura": 45176, + "mural": 12315, + "murals": 31499, + "murder": 28136, + "murder": 5787, + "murdered": 13158, + "murderer": 26956, + "murderers": 48472, + "murdering": 36055, + "murders": 22409, + "murdoch": 29037, + "murphy": 48976, + "murphy": 8914, + "murray": 31978, + "murray": 7513, + "murs": 38783, + "mus": 2198, + "mus": 8103, + "musa": 30540, + "musc": 5696, + "muscat": 33322, + "muscle": 27323, + "muscle": 9269, + "muscles": 16786, + "muscular": 30606, + "muse": 2369, + "muse": 15686, + "museo": 36457, + "muses": 48243, + "museu": 27087, + "museum": 15602, + "museum": 2786, + "museums": 15542, + "museumweek": 37996, + "mush": 7635, + "mushroom": 13011, + "mushrooms": 14730, + "musi": 15628, + "music": 4110, + "music": 1179, + "musica": 26668, + "musical": 36002, + "musical": 5173, + "musically": 48893, + "musicals": 36974, + "musichistory": 37890, + "musician": 11179, + "musicians": 12498, + "musicislife": 43311, + "musicmonday": 35887, + "musicvideo": 26764, + "musik": 32986, + "musings": 44961, + "musique": 42250, + "musk": 32143, + "musk": 19063, + "muskete": 32775, + "musketeers": 37993, + "musko": 34987, + "muskoka": 40832, + "musli": 4958, + "muslim": 43795, + "muslim": 7060, + "muslims": 10513, + "muss": 41493, + "mussels": 33393, + "must": 6783, + "must": 2048, + "mustache": 23451, + "mustaf": 23596, + "mustafa": 29000, + "mustang": 42361, + "mustang": 13309, + "mustangs": 22500, + "mustard": 15794, + "muster": 47361, + "mustread": 28978, + "mut": 12598, + "mut": 22839, + "mutant": 28384, + "mutation": 38626, + "mutations": 39651, + "mute": 31252, + "muted": 48028, + "muth": 34280, + "mutil": 39950, + "mutt": 45924, + "mutu": 17574, + "mutual": 15055, + "mutuals": 31158, + "muy": 44625, + "mv": 10580, + "mv": 8269, + 
"mvc": 40549, + "mvp": 8905, + "mw": 16725, + "mw": 11206, + "mwc": 24289, + "mwf": 48565, + "mx": 21947, + "mx": 9575, + "my": 1152, + "my": 607, + "mya": 31401, + "myal": 42735, + "myan": 13761, + "myanmar": 14764, + "myart": 38826, + "myco": 48362, + "mydayin": 41896, + "mydayinla": 42801, + "mydubai": 43475, + "mye": 27551, + "myel": 40084, + "myers": 15993, + "myjaps": 47939, + "myle": 43700, + "myles": 25511, + "mylife": 30537, + "mylittle": 37757, + "mylittlepony": 45107, + "myo": 16206, + "myr": 20272, + "myra": 35694, + "myri": 34972, + "myrt": 47785, + "myrtle": 27768, + "mys": 11724, + "myself": 3245, + "mysore": 44924, + "myspace": 41382, + "myster": 4669, + "mysteries": 20605, + "mysterious": 12650, + "mystery": 39828, + "mystery": 6711, + "mysti": 28711, + "mystic": 36264, + "mystic": 23722, + "mystical": 34122, + "myth": 20322, + "myth": 13878, + "mythical": 34377, + "mytho": 43857, + "mythology": 22496, + "myths": 18675, + "mz": 29509, + "mz": 33400, + "mzan": 36322, + "mzansi": 43301, + "má": 36842, + "mé": 21890, + "méxico": 46159, + "mü": 28142, + "mün": 41235, + "n": 77, + "n": 333, + "na": 1097, + "na": 1272, + "naa": 37738, + "naacp": 32176, + "nab": 6951, + "nab": 19440, + "nabe": 35111, + "naby": 24800, + "nac": 14557, + "nac": 18950, + "nach": 12168, + "nach": 43622, + "nacho": 35647, + "nachos": 32847, + "nacht": 37261, + "nacional": 38782, + "nad": 6204, + "nad": 43928, + "nada": 31683, + "nadal": 20814, + "nade": 24908, + "nadi": 30512, + "nadia": 27487, + "nadine": 23356, + "nadu": 20936, + "nae": 19374, + "naf": 16161, + "naf": 45956, + "nafta": 43123, + "nag": 6694, + "nag": 23902, + "naga": 45953, + "naga": 38997, + "nagar": 17490, + "nage": 41219, + "nago": 38349, + "nagoya": 43303, + "nagpur": 43328, + "nah": 26421, + "nah": 11129, + "nahi": 35244, + "nai": 6230, + "nai": 10692, + "naia": 31340, + "naidu": 42429, + "naija": 16326, + "naik": 34424, + "nail": 19459, + "nail": 9059, + "nailart": 43532, + "nailed": 19035, + "nails": 8469, + "nair": 27107, + "naira": 39450, + "naire": 48892, + "nairobi": 17756, + "nais": 46396, + "naissance": 44761, + "naive": 43362, + "naj": 30985, + "naji": 32589, + "nak": 9248, + "nak": 25550, + "naked": 46371, + "naked": 11478, + "naku": 39864, + "nal": 14132, + "nal": 3119, + "nale": 27198, + "nall": 32869, + "nally": 26158, + "nam": 1410, + "nam": 12344, + "nama": 39586, + "naman": 27635, + "namaste": 35549, + "name": 18160, + "name": 1981, + "named": 3194, + "nameis": 40831, + "nament": 3916, + "naments": 16540, + "names": 6130, + "namesake": 41298, + "nami": 20393, + "namibia": 23731, + "naming": 19367, + "namjoon": 31986, + "namm": 35524, + "namo": 46013, + "namo": 24854, + "nan": 4375, + "nan": 7750, + "nana": 18761, + "nanaimo": 40518, + "nancy": 21511, + "nancy": 11425, + "nand": 20435, + "nandez": 12764, + "nando": 46044, + "nang": 48148, + "nani": 27980, + "nanny": 31104, + "nano": 15835, + "nano": 22006, + "nanop": 34177, + "nanotechnology": 42235, + "nanow": 46734, + "nant": 22526, + "nantes": 47533, + "nantucket": 41573, + "nao": 39319, + "naom": 34955, + "naomi": 20173, + "nap": 6568, + "nap": 11012, + "napa": 20545, + "napier": 40875, + "napkin": 38930, + "naples": 23560, + "napo": 18715, + "napol": 20122, + "napoleon": 24969, + "napoli": 22445, + "napp": 11359, + "napping": 37657, + "naps": 31317, + "naq": 46453, + "nar": 2977, + "nar": 20145, + "nara": 33823, + "narcis": 25229, + "narcissi": 35442, + "narco": 38461, + "nard": 18216, + "nare": 34853, + "naren": 8468, + "narendr": 9807, + "narendra": 25848, + 
"narendramodi": 9853, + "narnia": 48693, + "narr": 11845, + "narrated": 43609, + "narrative": 15933, + "narratives": 35117, + "narrator": 46529, + "narrow": 24006, + "narrow": 16652, + "narrowly": 29747, + "naruto": 22732, + "nas": 3090, + "nas": 15250, + "nasa": 6841, + "nasal": 42853, + "nascar": 25723, + "nascar": 7868, + "nasdaq": 26629, + "nash": 6771, + "nash": 13620, + "nasheed": 49176, + "nashgrier": 33372, + "nashville": 45356, + "nashville": 8585, + "nasi": 47987, + "nasir": 47509, + "nassau": 34048, + "nasser": 43559, + "nasty": 32930, + "nasty": 8709, + "nat": 1276, + "nat": 11310, + "nata": 39392, + "natal": 28516, + "natali": 20296, + "natalia": 32978, + "natalie": 36634, + "natalie": 13595, + "natash": 48701, + "natasha": 23093, + "nate": 26643, + "nate": 7587, + "natgeo": 33009, + "natgeo": 25046, + "nath": 22203, + "nath": 19843, + "nathan": 13028, + "nathan": 9711, + "nathanfillion": 47422, + "nathaniel": 32667, + "nati": 1060, + "nati": 13384, + "natic": 44944, + "natin": 44358, + "nation": 2317, + "nation": 2670, + "national": 3126, + "national": 1362, + "nationalbestfriend": 42222, + "nationaldogday": 32227, + "nationalism": 29867, + "nationalist": 25058, + "nationality": 44451, + "nationally": 15130, + "nationalpark": 33060, + "nationalparks": 41204, + "nationals": 10784, + "nationaltrust": 34051, + "nations": 7654, + "nationwide": 13795, + "native": 20639, + "native": 4562, + "natives": 36060, + "nativity": 33988, + "natl": 39225, + "natl": 34465, + "nato": 13139, + "nats": 21106, + "natu": 2775, + "natur": 6800, + "natural": 13198, + "natural": 3288, + "naturally": 12995, + "naturals": 44686, + "nature": 9382, + "nature": 2625, + "naturelovers": 41514, + "naturephotography": 22533, + "natures": 15616, + "natureuk": 46193, + "nau": 5955, + "nau": 32878, + "naught": 41001, + "naughty": 47255, + "naughty": 15101, + "nautical": 31660, + "nav": 3413, + "nav": 25308, + "navajo": 35523, + "naval": 44725, + "naval": 13273, + "navar": 24848, + "navarro": 37104, + "nave": 42704, + "naveen": 43837, + "naver": 32534, + "navi": 16159, + "navi": 44848, + "navig": 12507, + "navigate": 24400, + "navigating": 33134, + "navigation": 20148, + "navigator": 38910, + "navis": 36377, + "navratri": 45428, + "navy": 28414, + "navy": 5598, + "naw": 16259, + "naw": 30500, + "nawaz": 49161, + "nawaz": 19523, + "nax": 38299, + "nay": 11704, + "nay": 16182, + "naya": 38917, + "nayanth": 38157, + "nayanthara": 45184, + "naz": 6363, + "naz": 35534, + "nazi": 12972, + "nazis": 21778, + "nb": 6459, + "nb": 6813, + "nba": 22524, + "nba": 5139, + "nbad": 43458, + "nbaf": 30127, + "nbafinals": 33803, + "nbap": 41956, + "nbaplayoffs": 43860, + "nbat": 46291, + "nbc": 9352, + "nbc": 8799, + "nbd": 24526, + "nbl": 42652, + "nc": 5021, + "nc": 4911, + "nca": 6921, + "ncaa": 9418, + "ncbd": 47221, + "ncc": 33195, + "ncc": 36686, + "ncds": 47573, + "ncfc": 31274, + "ncis": 33617, + "ncpol": 40562, + "ncr": 38474, + "ncs": 42689, + "nct": 27723, + "nct": 20319, + "ncwx": 36166, + "nd": 5625, + "nd": 1764, + "nda": 32862, + "ndc": 47564, + "ndi": 48229, + "ndp": 19257, + "nds": 31347, + "ndtv": 26261, + "ne": 557, + "ne": 1422, + "nea": 24068, + "neal": 33652, + "neal": 16730, + "near": 11296, + "near": 2252, + "nearby": 13314, + "nearest": 18985, + "nearing": 26571, + "nearly": 4816, + "nears": 37710, + "neat": 43201, + "neat": 15465, + "neath": 18315, + "neau": 31559, + "neb": 40209, + "nebra": 13371, + "nebraska": 14565, + "nebu": 49295, + "nebula": 22532, + "nec": 25109, + "nec": 22992, + "necess": 6961, + 
"necessarily": 25853, + "necessary": 8955, + "necessities": 43483, + "necessity": 33163, + "neck": 6066, + "neck": 6906, + "necklace": 7385, + "necklaces": 32276, + "necks": 29701, + "nectar": 33683, + "ned": 16030, + "ned": 1369, + "nederland": 49058, + "nee": 20494, + "nee": 10601, + "need": 3229, + "need": 1262, + "needed": 4049, + "needing": 22894, + "needle": 44490, + "needle": 19886, + "needles": 27250, + "needless": 39984, + "needs": 2536, + "needy": 30150, + "neel": 33092, + "neel": 46043, + "neer": 34245, + "nees": 47248, + "neet": 46362, + "neg": 5513, + "negan": 42623, + "negative": 8869, + "negatively": 40254, + "negativity": 34658, + "neglec": 18827, + "neglect": 33680, + "neglected": 31893, + "negli": 32594, + "negligence": 45658, + "negoti": 10216, + "negotiate": 32969, + "negotiating": 35510, + "negotiation": 36504, + "negotiations": 20433, + "negr": 42190, + "negro": 26554, + "neh": 40416, + "neh": 41697, + "neha": 44463, + "nehru": 30316, + "nei": 9366, + "neigh": 4061, + "neighb": 6534, + "neighbor": 7759, + "neighbor": 14485, + "neighborhood": 9471, + "neighborhoods": 26713, + "neighboring": 44754, + "neighbors": 13037, + "neighbour": 15858, + "neighbour": 23719, + "neighbourhood": 20312, + "neighbours": 17594, + "neil": 13591, + "neil": 8030, + "neilhimself": 45682, + "neill": 19324, + "neither": 14398, + "nek": 47727, + "neko": 47066, + "nel": 5476, + "nel": 2693, + "nell": 27081, + "nell": 8117, + "nelly": 21166, + "nels": 19296, + "nelson": 24774, + "nelson": 8586, + "nem": 45153, + "neman": 48553, + "neme": 30993, + "nemesis": 37811, + "nemo": 30441, + "nen": 17817, + "nen": 15451, + "nene": 44167, + "neo": 14562, + "neo": 11017, + "neon": 21043, + "neon": 13919, + "neonatal": 46464, + "neop": 49069, + "nep": 20739, + "nep": 41960, + "nepal": 25597, + "nepal": 10066, + "nepali": 47579, + "neph": 27926, + "nephe": 41810, + "nephew": 11689, + "nephews": 43747, + "nephro": 43054, + "neptune": 30566, + "ner": 2064, + "ner": 998, + "nerd": 24452, + "nerd": 12273, + "nerds": 22609, + "nerdy": 33124, + "nered": 17583, + "nerf": 42914, + "nering": 20226, + "nero": 29048, + "ners": 2129, + "nerve": 18571, + "nerves": 27813, + "nervous": 13928, + "nery": 48597, + "nes": 5457, + "nes": 4980, + "nesburg": 27159, + "nese": 32220, + "ness": 7187, + "ness": 1294, + "nesses": 20107, + "nessy": 32939, + "nest": 20302, + "nest": 8719, + "nesting": 28860, + "nestle": 43967, + "nestled": 38107, + "nests": 41133, + "net": 1851, + "net": 2315, + "netany": 23137, + "netanyahu": 23583, + "netball": 19761, + "netes": 44335, + "netfli": 6304, + "netflix": 35325, + "netflix": 6600, + "nether": 9946, + "netherlands": 11060, + "neti": 43980, + "netneutrality": 47794, + "nets": 8582, + "nett": 23403, + "nett": 6975, + "nette": 13271, + "network": 23285, + "network": 3304, + "networking": 9818, + "networks": 10004, + "neu": 3855, + "neu": 43342, + "neue": 45764, + "neur": 19001, + "neur": 31976, + "neural": 26388, + "neuro": 7401, + "neuro": 36000, + "neurological": 41718, + "neurology": 43197, + "neurons": 40442, + "neuroscience": 23381, + "neutr": 17207, + "neutral": 17011, + "neutrality": 26511, + "neutron": 44056, + "nev": 10236, + "nev": 43645, + "neva": 43304, + "nevada": 13499, + "neve": 44099, + "neve": 44023, + "never": 6746, + "never": 1426, + "neveragain": 45053, + "neverforget": 19242, + "nevergiveup": 42497, + "neverland": 41483, + "nevertheless": 48355, + "nevertrump": 47494, + "neville": 19269, + "nevis": 43670, + "new": 1218, + "new": 686, + "newark": 20240, + "newbie": 45427, + 
"newborn": 18320, + "newbury": 34169, + "newcastle": 41955, + "newcastle": 9302, + "newcomer": 30648, + "newcomers": 44037, + "newe": 40068, + "newell": 41436, + "newer": 33099, + "newest": 4990, + "newfound": 25250, + "newfoundland": 28079, + "newh": 18546, + "newin": 31911, + "newjersey": 32621, + "newly": 42186, + "newly": 7056, + "newman": 15815, + "newmarket": 38617, + "newmexico": 35238, + "newmusic": 32510, + "newmusic": 17201, + "newor": 25969, + "neworleans": 31205, + "newport": 42580, + "newport": 14846, + "newprofile": 14633, + "newprofilepic": 14754, + "newrelease": 34793, + "news": 6216, + "news": 1120, + "newsat": 43979, + "newsc": 28656, + "newscast": 45031, + "newsle": 10727, + "newsletter": 11069, + "newsnow": 48650, + "newsp": 7109, + "newspaper": 8786, + "newspapers": 22423, + "newsroom": 23200, + "newt": 37224, + "newton": 33122, + "newton": 12606, + "newtown": 31747, + "newyear": 22161, + "newyear": 12999, + "newyearseve": 37587, + "newyork": 18140, + "newyork": 10454, + "newyorkcity": 30460, + "newyorker": 39732, + "newzealand": 21117, + "nex": 6897, + "nex": 39720, + "next": 12434, + "next": 1131, + "nextgen": 41933, + "nexus": 19053, + "ney": 3857, + "ney": 1438, + "neymar": 21878, + "neys": 12616, + "nez": 27388, + "nf": 15195, + "nf": 25643, + "nfamily": 20098, + "nfc": 23695, + "nffc": 27893, + "nfl": 11219, + "nfl": 4691, + "nfldraft": 25002, + "ng": 10352, + "ng": 5215, + "nga": 35477, + "ngc": 29046, + "ngo": 38740, + "ngo": 24821, + "ngos": 34627, + "nguyen": 29947, + "nh": 3760, + "nh": 10803, + "nhc": 44817, + "nhl": 12290, + "nhl": 8167, + "nhlbruins": 39081, + "nhljets": 49357, + "nhm": 39483, + "nhpolitics": 36125, + "nhq": 42368, + "nhra": 30052, + "nhs": 23282, + "nhs": 7695, + "ni": 697, + "ni": 3256, + "nia": 3098, + "niag": 18071, + "niagar": 39298, + "niagara": 18965, + "niall": 41354, + "niall": 8327, + "niallo": 22855, + "niallofficial": 23084, + "niam": 39347, + "nian": 46003, + "nib": 31049, + "nic": 2109, + "nic": 6651, + "nica": 29040, + "nicar": 25119, + "nicaragua": 28423, + "nice": 28386, + "nice": 1805, + "nicely": 12303, + "nicer": 29488, + "nicest": 22967, + "niche": 25279, + "nichol": 7668, + "nicholas": 39814, + "nicholas": 13148, + "nicholls": 38846, + "nichols": 22730, + "nicholson": 28745, + "nick": 4209, + "nick": 4253, + "nickel": 22034, + "nickelo": 28668, + "nickelodeon": 33279, + "nicki": 17738, + "nickimin": 27390, + "nickiminaj": 27593, + "nickjonas": 43862, + "nickname": 24731, + "nicknamed": 45190, + "nicks": 15049, + "nicky": 28893, + "nicky": 22091, + "nico": 20850, + "nico": 17779, + "nicol": 9919, + "nicol": 48274, + "nicola": 21791, + "nicolas": 43813, + "nicolas": 18918, + "nicole": 21246, + "nicole": 10000, + "nicot": 45099, + "nicotine": 46697, + "nie": 9524, + "nie": 3501, + "niece": 12795, + "nieces": 44877, + "niel": 19109, + "niel": 26837, + "niels": 37154, + "nielsen": 28372, + "nier": 13014, + "nies": 10586, + "niest": 15007, + "nieu": 29781, + "nific": 4748, + "nifty": 25604, + "nig": 27933, + "nig": 28099, + "nigan": 48516, + "nigel": 33919, + "nigel": 15153, + "niger": 4524, + "niger": 29920, + "nigeri": 40913, + "nigeria": 6106, + "nigerian": 12167, + "nigerians": 25358, + "nigh": 13525, + "nigh": 48157, + "night": 3870, + "night": 930, + "nightclub": 20418, + "nighter": 41349, + "nighting": 36211, + "nightingale": 40696, + "nightlife": 28823, + "nightly": 28868, + "nightmare": 12867, + "nightmares": 24032, + "nightout": 44257, + "nights": 4296, + "nighttime": 38147, + "nightw": 39956, + "nih": 25783, + 
"nik": 5126, + "nik": 13705, + "nike": 16300, + "nike": 5783, + "nikeplus": 43154, + "niki": 36136, + "nikita": 37118, + "nikk": 38596, + "nikki": 23156, + "nikki": 16689, + "niko": 43771, + "nikol": 27430, + "nikola": 42146, + "nikon": 25488, + "nikon": 13849, + "nikov": 43960, + "nil": 16852, + "nil": 35030, + "nile": 24252, + "nim": 30402, + "nim": 42093, + "nima": 42586, + "nin": 5794, + "nin": 14145, + "nina": 13891, + "nine": 16213, + "nine": 7330, + "ninety": 48214, + "ning": 6050, + "ning": 762, + "ningham": 23395, + "ningly": 43537, + "nings": 4588, + "nington": 26214, + "ninj": 23225, + "ninja": 11969, + "ninjas": 42796, + "nino": 25633, + "ninten": 6184, + "nintendo": 13969, + "nintendo": 7886, + "nintendoswitch": 16404, + "ninth": 22770, + "nip": 33889, + "nip": 22333, + "nipp": 24634, + "nipple": 45987, + "nipples": 44774, + "nippon": 47960, + "nips": 49241, + "nir": 15503, + "nir": 40057, + "nireland": 45763, + "niro": 47373, + "nirvana": 28300, + "nis": 5609, + "nis": 3786, + "nish": 19834, + "nish": 13256, + "nished": 24141, + "nishi": 32386, + "nishings": 49247, + "nison": 45700, + "niss": 39043, + "nissan": 37635, + "nissan": 11082, + "nist": 17782, + "nister": 36640, + "nit": 4087, + "nit": 19011, + "nite": 8427, + "niti": 43964, + "niti": 45355, + "nitin": 37529, + "nitro": 30726, + "nitrogen": 30706, + "niture": 7840, + "nity": 12707, + "niu": 48187, + "niv": 47300, + "niversary": 29643, + "nix": 48552, + "nix": 32278, + "nixon": 20671, + "nj": 8343, + "nj": 6672, + "njcaa": 48992, + "njpw": 38992, + "nk": 22708, + "nk": 17456, + "nko": 36353, + "nl": 12057, + "nl": 7655, + "nli": 37502, + "nlp": 35680, + "nlwx": 49260, + "nm": 15956, + "nm": 11370, + "nmd": 43331, + "nme": 40454, + "nmwx": 47967, + "nn": 8947, + "nn": 12925, + "nnn": 26277, + "nnnn": 41420, + "no": 578, + "no": 871, + "noaa": 27557, + "noah": 28806, + "noah": 11519, + "nobel": 33742, + "nobel": 15605, + "nobelprize": 46074, + "noble": 29430, + "noble": 12051, + "nobody": 7009, + "noc": 16988, + "noc": 44420, + "nocchi": 46359, + "noch": 38672, + "noche": 29689, + "noches": 44166, + "nock": 16993, + "noctur": 26291, + "nocturnal": 41738, + "nod": 18648, + "nodapl": 39079, + "node": 31434, + "node": 24871, + "nodejs": 39262, + "nodes": 40534, + "noel": 38406, + "noel": 17496, + "nof": 29505, + "noff": 46979, + "nofilter": 16418, + "nog": 31157, + "noh": 40775, + "noi": 43115, + "noi": 39889, + "noida": 33404, + "noir": 39291, + "noir": 12953, + "nois": 22057, + "noise": 41018, + "noise": 9307, + "noises": 31575, + "noisse": 45686, + "noisy": 33495, + "nokia": 17731, + "nol": 8055, + "nola": 13289, + "nolan": 17323, + "nold": 40322, + "nole": 34654, + "noles": 40569, + "nollywood": 43145, + "nology": 42221, + "nom": 2981, + "nom": 12799, + "nomad": 27849, + "noman": 45592, + "nomin": 5643, + "nominate": 17122, + "nominated": 8710, + "nominating": 45747, + "nomination": 14136, + "nominations": 17124, + "nominee": 14122, + "nominees": 17873, + "nomnom": 26962, + "nomore": 35126, + "noms": 35706, + "non": 4282, + "non": 3353, + "none": 29644, + "none": 8906, + "nonetheless": 39675, + "nonfiction": 31654, + "nonprofit": 19315, + "nonprofits": 37935, + "nonsense": 19136, + "nonstop": 30300, + "nont": 25207, + "noo": 6759, + "noo": 46672, + "noodle": 19521, + "noodles": 15782, + "nook": 30088, + "noon": 37693, + "noon": 2347, + "noor": 46978, + "noor": 31323, + "nope": 15625, + "nor": 1062, + "nor": 6190, + "nora": 25890, + "norcal": 41970, + "nord": 19261, + "nord": 36067, + "nordic": 36439, + "nordic": 
20734, + "nordstrom": 38562, + "norfolk": 30232, + "norfolk": 12202, + "norm": 10990, + "norm": 22457, + "norma": 35757, + "normal": 28748, + "normal": 5967, + "normali": 45157, + "normally": 15870, + "norman": 22027, + "norman": 11338, + "normandy": 23840, + "normani": 44596, + "norms": 33011, + "norris": 21814, + "norse": 36559, + "norte": 35638, + "north": 3468, + "north": 2188, + "northampton": 49246, + "northampton": 26175, + "northan": 37081, + "northbound": 24228, + "northcarolina": 43386, + "northe": 24675, + "northeast": 42673, + "northeast": 13009, + "northeastern": 28297, + "northeasthour": 42869, + "norther": 26908, + "northern": 17210, + "northern": 5049, + "northernlights": 48940, + "northkorea": 38495, + "northside": 45957, + "northumber": 22295, + "northumberland": 22922, + "northwales": 49371, + "northwest": 12894, + "northwestern": 23685, + "norton": 18032, + "norway": 8780, + "norwe": 14414, + "norwegian": 15971, + "norwich": 37629, + "norwich": 15812, + "norwood": 37889, + "nos": 13420, + "nose": 24192, + "nose": 8231, + "noses": 48163, + "nostal": 12076, + "nostalgia": 16622, + "nostalgic": 24468, + "not": 2534, + "not": 783, + "notable": 22023, + "notch": 19476, + "notdead": 42059, + "note": 10910, + "note": 3246, + "notebook": 16365, + "notebooks": 37623, + "noted": 22501, + "notes": 5795, + "nothin": 24291, + "nothing": 28412, + "nothing": 2586, + "noti": 10686, + "notic": 6915, + "notice": 6683, + "noticeable": 40857, + "noticed": 9324, + "notices": 33459, + "noticias": 47759, + "noticing": 37571, + "notification": 22512, + "notifications": 23169, + "notified": 39454, + "noting": 38649, + "notion": 37856, + "notjust": 33212, + "notjustlakes": 45803, + "notmy": 39301, + "noto": 29878, + "noton": 48258, + "notor": 21711, + "notori": 44065, + "notorious": 22489, + "notre": 24397, + "notre": 15306, + "notredame": 34077, + "notsorry": 34361, + "nott": 9333, + "nott": 34989, + "notte": 47308, + "nottingham": 12852, + "notts": 25598, + "nou": 8751, + "nou": 30953, + "noun": 33663, + "nouri": 23796, + "nourish": 46025, + "nourished": 48354, + "nous": 29485, + "nouveau": 29948, + "nouvel": 34215, + "nov": 2264, + "nov": 4293, + "nova": 11236, + "novak": 26465, + "novasco": 33785, + "novascotia": 34744, + "novation": 39753, + "nove": 30507, + "novel": 15044, + "novel": 6080, + "novelist": 27314, + "novella": 42770, + "novels": 16040, + "novelty": 37750, + "november": 3680, + "nover": 37465, + "novi": 47957, + "novice": 33743, + "novo": 27504, + "novo": 36581, + "now": 2040, + "now": 692, + "nowadays": 26155, + "nowhere": 14108, + "nowplaying": 3708, + "nowwatching": 30852, + "nox": 27406, + "noxi": 39304, + "noxious": 42833, + "noy": 32787, + "np": 18205, + "np": 6314, + "npa": 42378, + "npc": 33966, + "npr": 39941, + "npr": 24078, + "nps": 22025, + "npt": 47231, + "nr": 6574, + "nr": 9713, + "nra": 17286, + "nrc": 45786, + "nrf": 47982, + "nrg": 48662, + "nrl": 27142, + "nrl": 18127, + "ns": 12405, + "ns": 1373, + "nsa": 23004, + "nsc": 32792, + "nsd": 36659, + "nsf": 34180, + "nsfw": 19847, + "nsi": 47824, + "nsw": 21301, + "nsw": 11693, + "nswpol": 44434, + "nt": 10902, + "nt": 3207, + "ntr": 30845, + "nts": 43775, + "ntt": 22859, + "ntv": 24807, + "ntv": 45304, + "nu": 1156, + "nu": 9444, + "nucle": 25693, + "nuclear": 34136, + "nuclear": 7279, + "nude": 16630, + "nudes": 32122, + "nue": 22834, + "nuestra": 45649, + "nuestro": 38590, + "nuev": 47861, + "nueva": 48810, + "nuevo": 30265, + "nufc": 15720, + "nuff": 37324, + "nug": 13471, + "nugent": 47457, + "nugget": 
25448, + "nuggets": 18970, + "nuh": 45950, + "nuit": 38815, + "nuk": 39228, + "nuke": 39399, + "nul": 29358, + "null": 47376, + "num": 17896, + "num": 30534, + "numb": 34639, + "numb": 39427, + "number": 44078, + "number": 2842, + "numbered": 25975, + "numbers": 6121, + "numer": 11442, + "numerous": 17082, + "numis": 39100, + "nun": 12511, + "nun": 28540, + "nunavut": 48626, + "nunes": 40697, + "nuns": 44061, + "nup": 46757, + "nur": 3920, + "nur": 33493, + "nure": 42480, + "nurse": 37547, + "nurse": 10058, + "nursery": 15540, + "nurses": 12938, + "nursing": 11126, + "nurture": 38865, + "nurturing": 45229, + "nus": 25157, + "nus": 18239, + "nut": 10358, + "nut": 6491, + "nutcracker": 36733, + "nutella": 27312, + "nutr": 6198, + "nutri": 15470, + "nutrient": 32900, + "nutrients": 24668, + "nutriti": 17978, + "nutrition": 41546, + "nutrition": 7989, + "nutritional": 26457, + "nutritious": 30387, + "nuts": 8644, + "nutshell": 26659, + "nutty": 39846, + "nv": 17217, + "nv": 16985, + "nvi": 22847, + "nvidia": 27325, + "nw": 7826, + "nw": 7030, + "nwa": 34237, + "nwo": 40976, + "nws": 23333, + "nws": 30998, + "nwsl": 48394, + "nwt": 25029, + "nx": 18810, + "nx": 16997, + "nxt": 35037, + "nxt": 17804, + "ny": 1383, + "ny": 1350, + "nya": 24165, + "nyc": 13304, + "nyc": 2832, + "nycc": 27187, + "nycfc": 47497, + "nye": 40723, + "nye": 13416, + "nyfw": 21089, + "nyk": 46841, + "nylon": 25915, + "nyo": 41534, + "nyo": 44586, + "nypd": 42293, + "nypd": 18279, + "nyr": 32538, + "nyrd": 47936, + "nys": 36375, + "nys": 23423, + "nyse": 32650, + "nyt": 46311, + "nyt": 12816, + "nytimes": 13772, + "nyu": 43143, + "nyu": 31355, + "nz": 10142, + "nz": 7082, + "o": 78, + "o": 334, + "oa": 11994, + "oahu": 37790, + "oak": 6010, + "oak": 7221, + "oakland": 42663, + "oakland": 12077, + "oakley": 27810, + "oaks": 16734, + "oakville": 38500, + "oasis": 18185, + "oat": 20095, + "oat": 34132, + "oates": 47094, + "oath": 20108, + "oatmeal": 26374, + "oats": 24150, + "oax": 43090, + "oaxaca": 47818, + "ob": 1411, + "ob": 14908, + "oba": 42902, + "oba": 15147, + "obam": 13174, + "obama": 4276, + "obamacare": 18005, + "obe": 11897, + "obe": 29117, + "obedience": 48921, + "ober": 15284, + "obese": 41757, + "obesity": 19499, + "obey": 26926, + "obi": 21454, + "obi": 18414, + "obile": 20513, + "obitu": 39218, + "obituary": 43580, + "objec": 7970, + "object": 14115, + "objective": 23663, + "objectives": 30238, + "objects": 13770, + "obl": 31452, + "oblast": 42672, + "obli": 11416, + "obligation": 34473, + "obligations": 38232, + "obligatory": 35020, + "oblivion": 45323, + "obo": 46001, + "obo": 26618, + "obrien": 31946, + "obs": 39162, + "obsc": 20392, + "obscure": 33337, + "obse": 8433, + "observ": 9050, + "observation": 20250, + "observations": 27409, + "observatory": 21236, + "observe": 23217, + "observed": 21267, + "observer": 22077, + "observers": 47544, + "observing": 28359, + "obsessed": 9744, + "obsession": 15718, + "obsi": 47323, + "obsole": 35561, + "obsolete": 40628, + "obst": 29398, + "obstac": 24075, + "obstacle": 29751, + "obstacles": 24480, + "obste": 49103, + "obstru": 44876, + "obstruc": 38762, + "obstruction": 40240, + "obtain": 26555, + "obtained": 29322, + "obvious": 13959, + "obviously": 10068, + "oc": 1566, + "oc": 6603, + "oca": 31120, + "ocal": 38148, + "occ": 43940, + "occa": 8530, + "occasion": 12280, + "occasional": 33059, + "occasionally": 32479, + "occasions": 26154, + "occer": 20804, + "occi": 42994, + "occu": 7863, + "occult": 42529, + "occup": 11152, + "occupation": 18624, + "occupational": 
30644, + "occupied": 17271, + "occupy": 22453, + "occupy": 24210, + "occur": 11264, + "occur": 21813, + "occurred": 19850, + "occurrence": 40615, + "occurring": 31335, + "occurs": 26563, + "ocd": 35904, + "oce": 3509, + "ocean": 12941, + "ocean": 4918, + "oceans": 16792, + "och": 29334, + "och": 32011, + "oche": 33045, + "oci": 9891, + "ocity": 46039, + "ock": 33579, + "ock": 21313, + "ocks": 22410, + "oclock": 36274, + "oco": 32553, + "ocon": 33090, + "ocr": 45813, + "ocre": 40320, + "ocs": 27297, + "oct": 4565, + "octa": 23444, + "octag": 37768, + "octagon": 49167, + "octane": 43040, + "octavia": 47416, + "octo": 31032, + "october": 3481, + "octopus": 22327, + "ocu": 22709, + "oculus": 30082, + "od": 4886, + "od": 9719, + "oda": 24777, + "oday": 41954, + "odd": 15525, + "odd": 11387, + "oddly": 34213, + "odds": 11555, + "ode": 19125, + "ode": 19639, + "odell": 41556, + "odessa": 43574, + "odi": 12223, + "odi": 18853, + "odin": 35175, + "odisha": 15737, + "odo": 49188, + "odo": 40993, + "odor": 39509, + "odu": 35095, + "odu": 39904, + "odyssey": 19991, + "oe": 24251, + "oe": 11667, + "oec": 24288, + "oecd": 30816, + "oem": 29650, + "oes": 3643, + "of": 684, + "of": 539, + "ofa": 29774, + "ofc": 19877, + "ofe": 30000, + "ofer": 47322, + "off": 892, + "off": 1007, + "offe": 8261, + "offee": 34059, + "offen": 7231, + "offence": 34594, + "offences": 33972, + "offended": 30765, + "offender": 48294, + "offenders": 35878, + "offense": 15253, + "offensive": 11037, + "offer": 20607, + "offer": 3271, + "offered": 9395, + "offering": 6896, + "offerings": 24535, + "offers": 4679, + "offic": 3276, + "office": 18033, + "office": 2171, + "officeof": 38750, + "officeofrg": 47100, + "officer": 4683, + "officers": 6335, + "offices": 10933, + "offici": 1401, + "official": 5768, + "official": 1868, + "officially": 4226, + "officials": 7658, + "officiel": 26548, + "offl": 16851, + "offline": 22724, + "offro": 32198, + "offroad": 37173, + "offs": 23987, + "offseason": 25485, + "offset": 28843, + "offshore": 15496, + "offside": 49347, + "offspring": 38635, + "offthe": 38189, + "ofi": 36692, + "ofi": 49090, + "oficial": 18061, + "oft": 16693, + "oftball": 39768, + "often": 4864, + "ofthe": 7592, + "oftheday": 6988, + "oftheweek": 20654, + "oftheyear": 33975, + "og": 11542, + "og": 8555, + "oga": 47312, + "ogden": 42011, + "ogil": 39013, + "ography": 22399, + "ogue": 24761, + "ogun": 48970, + "oh": 5648, + "oh": 1779, + "ohana": 48330, + "ohh": 23076, + "ohhh": 27697, + "ohhhh": 40201, + "ohi": 5207, + "ohio": 18951, + "ohio": 6155, + "ohiostate": 41324, + "ohl": 45547, + "ohl": 41095, + "ohmy": 29758, + "ohn": 48043, + "ohs": 39542, + "ohwx": 47993, + "oi": 27357, + "oi": 13934, + "oic": 45554, + "oid": 14758, + "oids": 21847, + "oil": 11973, + "oil": 2870, + "oiland": 32316, + "oilandgas": 34130, + "oilers": 21627, + "oilpainting": 34279, + "oils": 17886, + "oily": 47550, + "oir": 48079, + "oir": 37113, + "ois": 23262, + "oit": 18453, + "oitnb": 34865, + "oj": 30986, + "oj": 34553, + "ok": 1944, + "ok": 2481, + "oka": 42258, + "oka": 19092, + "okan": 41263, + "okanagan": 43233, + "okay": 4917, + "okc": 42418, + "okc": 18357, + "oke": 26636, + "oke": 23598, + "oki": 20390, + "okin": 30687, + "okinawa": 35877, + "okla": 9431, + "oklahoma": 10170, + "oko": 26892, + "oko": 26095, + "okstate": 36356, + "oktoberfest": 32026, + "oku": 45010, + "oku": 43829, + "okwx": 27336, + "ol": 562, + "ol": 2985, + "ola": 20499, + "ola": 3373, + "olaf": 39709, + "olan": 48489, + "olan": 24227, + "oland": 26452, + "olas": 40800, + 
"old": 4931, + "old": 896, + "olde": 37731, + "older": 7700, + "oldest": 9285, + "oldham": 29929, + "oldie": 35280, + "oldies": 36278, + "oldman": 48614, + "olds": 8580, + "oldschool": 44384, + "oldschool": 25133, + "oldsmobile": 45396, + "ole": 9089, + "ole": 1947, + "oled": 46768, + "oler": 24069, + "oles": 16962, + "olf": 16346, + "olga": 34779, + "oli": 3811, + "oli": 8810, + "olic": 31341, + "oligar": 46185, + "olim": 47769, + "olin": 37823, + "olin": 18283, + "olina": 34711, + "oline": 17441, + "oling": 38033, + "olini": 36040, + "olis": 49397, + "olithic": 35574, + "olive": 22486, + "olive": 9898, + "oliver": 22882, + "oliver": 9261, + "olives": 27149, + "olivi": 20773, + "olivia": 11697, + "olivier": 23891, + "oll": 32270, + "oll": 15510, + "olla": 31908, + "ollie": 24434, + "olls": 42697, + "olly": 23998, + "olo": 14628, + "olo": 7606, + "ological": 12345, + "ologist": 23442, + "ologists": 30912, + "ology": 4627, + "olor": 29245, + "olph": 25077, + "ols": 2236, + "olsen": 26307, + "olson": 28046, + "olt": 46252, + "olu": 16502, + "olu": 46302, + "olulu": 27645, + "oly": 20323, + "oly": 24823, + "olym": 3594, + "olympi": 13597, + "olympia": 23965, + "olympiad": 47694, + "olympian": 25420, + "olympians": 44583, + "olympic": 26099, + "olympic": 6388, + "olympics": 7629, + "olympus": 30960, + "om": 547, + "om": 3932, + "oma": 44603, + "oma": 5358, + "omaha": 16509, + "oman": 22088, + "oman": 10871, + "omar": 19488, + "omar": 13367, + "omars": 37099, + "omas": 36023, + "omat": 40788, + "omb": 34447, + "ombe": 35967, + "omd": 49346, + "ome": 3693, + "ome": 5832, + "omed": 16835, + "omega": 13465, + "omelette": 38789, + "omen": 9969, + "omen": 25469, + "oment": 43683, + "omeo": 39844, + "omer": 24087, + "omer": 17902, + "omes": 25736, + "ometer": 20060, + "ometric": 38702, + "omez": 12541, + "omf": 47496, + "omfg": 12523, + "omg": 35233, + "omg": 3186, + "omi": 24097, + "omi": 10341, + "omic": 40536, + "omic": 12793, + "omics": 15138, + "omile": 46915, + "omin": 16457, + "omination": 42571, + "oming": 10796, + "ominous": 40914, + "omni": 18793, + "omni": 39489, + "omnibus": 44760, + "omnic": 48383, + "omo": 14478, + "omo": 11066, + "omon": 48758, + "omor": 29431, + "oms": 3770, + "omusic": 38965, + "omy": 40805, + "omy": 6884, + "on": 521, + "on": 525, + "ona": 2687, + "onair": 29511, + "onal": 918, + "onboard": 21689, + "once": 16331, + "once": 2654, + "onceupon": 28122, + "onceuponatime": 33505, + "onco": 46700, + "oncology": 24593, + "ond": 27918, + "ond": 2636, + "onda": 32643, + "onday": 29864, + "onde": 44532, + "ondo": 29529, + "ondon": 42043, + "ondon": 11851, + "one": 1980, + "one": 637, + "onec": 27746, + "oned": 28012, + "oned": 4698, + "onedirection": 16245, + "onee": 44433, + "oneill": 44808, + "onelove": 47417, + "onent": 12147, + "onents": 11709, + "oneof": 48478, + "onep": 20440, + "onepiece": 43153, + "oneplus": 25981, + "oner": 30055, + "oner": 6071, + "oners": 12324, + "ones": 20757, + "ones": 1575, + "oneself": 46874, + "onesie": 33237, + "oness": 25379, + "onet": 36058, + "oneteam": 41094, + "onetsy": 33392, + "onew": 43848, + "onews": 18696, + "onex": 49116, + "oney": 44498, + "oney": 9408, + "onf": 41790, + "onfox": 29874, + "ong": 2787, + "ong": 846, + "onga": 30259, + "ongchang": 35071, + "ongi": 21754, + "ongo": 31226, + "ongoing": 10393, + "ongs": 12143, + "oni": 4385, + "oni": 8048, + "onia": 8001, + "onial": 27599, + "onian": 21090, + "onic": 15838, + "onic": 3711, + "onica": 14631, + "onics": 9779, + "onie": 35249, + "onies": 22601, + "onimo": 41271, + 
"oning": 5197, + "onion": 10985, + "onions": 15255, + "onist": 10099, + "onists": 19659, + "onix": 27370, + "onized": 43657, + "onlin": 31103, + "online": 12940, + "online": 2027, + "onlinemarketing": 41820, + "onlineshopping": 38587, + "only": 11646, + "only": 1033, + "onlyin": 32947, + "onna": 25438, + "onna": 35458, + "onnaise": 48934, + "onne": 23466, + "onnell": 45613, + "ono": 28165, + "ono": 14388, + "onom": 48014, + "onomy": 36873, + "onpoli": 20708, + "ons": 26076, + "ons": 708, + "onsale": 36324, + "onset": 30527, + "onsite": 37336, + "onstage": 21821, + "onstorm": 49333, + "ont": 34303, + "ont": 11157, + "ontari": 6739, + "ontario": 42766, + "ontario": 7436, + "onte": 34723, + "onthe": 12241, + "onther": 46563, + "ontheroad": 47516, + "onthisday": 6862, + "onto": 11745, + "onto": 3141, + "ontology": 37364, + "ontour": 32155, + "onu": 44142, + "onward": 34827, + "onwards": 20682, + "ony": 9490, + "ony": 2926, + "onym": 11483, + "onymous": 13038, + "onyx": 31353, + "oo": 574, + "oo": 2822, + "ood": 16429, + "ood": 738, + "oodle": 45289, + "oods": 44660, + "oof": 42270, + "ooh": 16806, + "ook": 22326, + "ook": 8394, + "ooks": 31082, + "ool": 37702, + "ool": 929, + "oom": 22786, + "oom": 15002, + "oomf": 40607, + "oon": 35651, + "oon": 7100, + "ooo": 9571, + "oooh": 28927, + "oooo": 4002, + "oooo": 13643, + "ooooo": 12532, + "oooooo": 43590, + "oooooo": 20372, + "ooooooo": 30859, + "oooooooo": 15473, + "oooooooo": 43408, + "oooooooooooooooo": 48645, + "oop": 7326, + "ooper": 39906, + "oops": 9116, + "oor": 35239, + "oos": 9896, + "oosa": 30834, + "oose": 38941, + "oot": 17667, + "ootball": 28914, + "ootd": 16547, + "ooth": 12682, + "oott": 34316, + "ooza": 22809, + "op": 676, + "op": 3691, + "opa": 28949, + "opal": 28982, + "opar": 18167, + "opath": 33079, + "opathic": 37521, + "opathy": 28466, + "opau": 27239, + "opd": 38288, + "ope": 31694, + "ope": 11440, + "opec": 33138, + "opel": 36952, + "open": 3647, + "open": 1488, + "openaccess": 26591, + "opend": 28069, + "opendata": 35709, + "openday": 46991, + "opened": 5303, + "opener": 8998, + "openhouse": 36091, + "opening": 33728, + "opening": 2516, + "openingday": 36359, + "openings": 27643, + "openly": 23005, + "opens": 4801, + "opensource": 29930, + "oper": 2796, + "oper": 37533, + "opera": 8056, + "operate": 19306, + "operated": 23031, + "operates": 38675, + "operating": 12218, + "operation": 27173, + "operation": 7639, + "operational": 18237, + "operations": 8106, + "operative": 28380, + "operator": 15972, + "operators": 19267, + "opers": 48728, + "opes": 37258, + "oph": 6796, + "opha": 38634, + "ophel": 45017, + "ophelia": 49118, + "ophi": 44547, + "ophile": 35915, + "opho": 12900, + "ophobia": 21111, + "ophobic": 29934, + "ophon": 25120, + "ophone": 26345, + "ophthal": 33135, + "ophy": 28539, + "opi": 40056, + "opi": 48994, + "opin": 7636, + "opini": 14825, + "opinion": 7843, + "opinions": 16192, + "opio": 17371, + "opioid": 22833, + "opioids": 47578, + "opla": 36270, + "ople": 25663, + "opol": 15173, + "opoly": 23729, + "opor": 39650, + "opoulos": 42020, + "opp": 2020, + "opp": 21024, + "oppa": 23637, + "oppo": 7399, + "oppo": 41770, + "opponent": 17002, + "opponents": 19664, + "oppor": 2914, + "opportun": 2939, + "opportunities": 5978, + "opportunity": 4004, + "oppos": 10091, + "oppose": 23617, + "opposed": 22509, + "opposes": 47471, + "opposing": 24376, + "opposite": 12872, + "opposition": 11062, + "oppre": 17341, + "oppressed": 41492, + "oppression": 30650, + "opra": 28291, + "oprah": 22562, + "opry": 35340, + "ops": 3054, 
+ "opt": 45103, + "opt": 27188, + "opted": 42035, + "opti": 6580, + "optic": 25190, + "optic": 24755, + "optical": 16822, + "optics": 27165, + "optim": 22331, + "optimal": 25235, + "optimi": 9737, + "optimis": 39459, + "optimism": 25226, + "optimist": 44581, + "optimistic": 23104, + "optimization": 25125, + "optimize": 30456, + "optimized": 43939, + "optimizing": 49157, + "optimum": 35974, + "optimus": 43453, + "option": 8464, + "optional": 25411, + "options": 7063, + "optome": 35533, + "opul": 39858, + "opus": 33295, + "opy": 21835, + "or": 523, + "or": 541, + "ora": 4301, + "orac": 24673, + "oracle": 37308, + "oracle": 15966, + "orah": 40820, + "orail": 45120, + "oral": 32490, + "oral": 6007, + "orama": 33619, + "oran": 32209, + "oran": 28395, + "orang": 22116, + "orange": 13957, + "orange": 4287, + "oranges": 32417, + "orangu": 36112, + "orb": 28894, + "orb": 36958, + "orbit": 19713, + "orbital": 40312, + "orc": 44305, + "orca": 18631, + "orcas": 47676, + "orch": 11893, + "orchar": 40226, + "orchard": 19530, + "orche": 8004, + "orchestr": 42937, + "orchestra": 9573, + "orchestral": 40285, + "orchi": 23696, + "orchid": 18678, + "orchids": 28376, + "ord": 26903, + "ord": 11502, + "orda": 33462, + "ordained": 38302, + "order": 24613, + "order": 2191, + "ordered": 8335, + "ordering": 19588, + "orderly": 43457, + "orders": 6187, + "ordin": 4378, + "ordinance": 38583, + "ordinary": 8012, + "ore": 3580, + "ore": 1423, + "orean": 36696, + "ored": 5133, + "oregon": 21759, + "oregon": 8035, + "oren": 21645, + "oreo": 21873, + "oreos": 41688, + "ores": 17328, + "org": 3401, + "org": 5593, + "organ": 3338, + "organ": 13213, + "organi": 3636, + "organic": 24080, + "organic": 5980, + "organics": 44199, + "organis": 13204, + "organisation": 15868, + "organisations": 20651, + "organise": 36073, + "organised": 13191, + "organiser": 49141, + "organisers": 35778, + "organising": 22787, + "organisms": 37041, + "organiz": 11107, + "organization": 8064, + "organizational": 29510, + "organizations": 13453, + "organize": 19973, + "organized": 10681, + "organizer": 23905, + "organizers": 27191, + "organizing": 15779, + "organs": 29872, + "orgs": 29500, + "ori": 1540, + "ori": 8693, + "oria": 11474, + "orial": 8648, + "orian": 21193, + "oric": 43810, + "orice": 41341, + "orie": 18815, + "orient": 13149, + "orient": 30770, + "oriental": 23056, + "orientation": 16873, + "oriente": 40390, + "oriented": 24596, + "orienteering": 42985, + "ories": 5934, + "orig": 2273, + "orig": 38463, + "origami": 31832, + "origin": 2555, + "origin": 12372, + "original": 18496, + "original": 3117, + "originally": 12849, + "originals": 16953, + "originated": 41823, + "origins": 16291, + "orin": 39863, + "oring": 3006, + "orio": 24308, + "orioles": 21430, + "orion": 21765, + "oris": 37064, + "orities": 7903, + "ority": 5556, + "orium": 12015, + "ork": 22202, + "ork": 37235, + "orkney": 34254, + "orl": 39465, + "orlando": 32247, + "orlando": 7827, + "orleans": 11127, + "orm": 38464, + "orn": 25412, + "orn": 8130, + "ornam": 36122, + "ornament": 23409, + "ornamental": 46270, + "ornaments": 28968, + "ornate": 46865, + "orni": 27713, + "ornithology": 38275, + "orns": 19340, + "oro": 9848, + "oro": 14573, + "orous": 19286, + "orph": 17318, + "orphan": 22718, + "orphan": 28994, + "orphanage": 45196, + "orphaned": 46792, + "orphans": 36588, + "orphe": 39186, + "orr": 32977, + "ors": 1127, + "orship": 20846, + "ort": 1019, + "ortega": 39727, + "orth": 22584, + "orth": 24461, + "ortho": 11366, + "orthodon": 37730, + "orthodox": 19008, + 
"orthop": 42123, + "orthopedic": 49341, + "ortiz": 23544, + "orton": 37238, + "oru": 44629, + "oru": 31281, + "orum": 42724, + "orwell": 41218, + "ory": 16983, + "ory": 1985, + "os": 2211, + "os": 1299, + "osa": 16340, + "osa": 17237, + "osaka": 21347, + "osborne": 22402, + "osbourne": 43376, + "osc": 5092, + "oscar": 21157, + "oscar": 8191, + "oscars": 11098, + "osce": 37303, + "oscill": 38272, + "ose": 46942, + "ose": 22541, + "osh": 30717, + "osh": 35011, + "osha": 33907, + "oshi": 34770, + "osi": 25247, + "osi": 17636, + "osis": 13903, + "osity": 12730, + "oslo": 20547, + "osm": 31626, + "osman": 46539, + "oso": 42793, + "oso": 21285, + "osp": 24387, + "ospre": 49001, + "osprey": 37893, + "oss": 29362, + "oss": 34640, + "ost": 23701, + "ost": 18749, + "oste": 20632, + "osteo": 43163, + "oster": 31781, + "ostr": 33673, + "ostrich": 47640, + "osu": 29480, + "osu": 19818, + "oswald": 38471, + "ot": 1863, + "ot": 2062, + "ota": 17509, + "ota": 8741, + "otago": 45919, + "otaku": 40743, + "otas": 47616, + "otc": 37934, + "otd": 5683, + "ote": 28511, + "ote": 19744, + "otes": 27280, + "oth": 33262, + "oth": 33519, + "other": 9758, + "other": 1010, + "others": 3326, + "otherwise": 12376, + "oti": 19567, + "oti": 45564, + "otic": 9671, + "otis": 28246, + "otive": 10877, + "oto": 23946, + "oto": 23399, + "otp": 29822, + "otr": 38685, + "ots": 5769, + "ott": 10167, + "ott": 7936, + "otta": 7623, + "otta": 20941, + "ottawa": 49027, + "ottawa": 9019, + "otte": 35214, + "otter": 34710, + "otter": 22456, + "otters": 38883, + "otti": 36721, + "ottnews": 33995, + "otto": 17730, + "ottoman": 27503, + "otw": 35259, + "otwol": 46868, + "ou": 520, + "ou": 6544, + "ouat": 32954, + "ouch": 13493, + "oud": 1359, + "oue": 48838, + "ouf": 34618, + "ough": 4204, + "ough": 991, + "ought": 2253, + "oughton": 36860, + "oui": 39421, + "ouk": 21796, + "oul": 20253, + "oul": 8081, + "ould": 859, + "oulos": 32808, + "oun": 636, + "oun": 20960, + "ounce": 15027, + "ounces": 30299, + "ound": 2013, + "ound": 853, + "oundation": 40132, + "ounded": 9634, + "ounding": 11944, + "ounds": 2753, + "oung": 35875, + "oung": 25341, + "ounge": 29427, + "ount": 43801, + "ount": 4172, + "ounts": 10963, + "oup": 32815, + "our": 727, + "our": 581, + "oura": 29806, + "oura": 36352, + "ourable": 24126, + "ourage": 34525, + "oural": 45840, + "oured": 6956, + "ouri": 12696, + "ouring": 12000, + "ourism": 25496, + "ourke": 26480, + "ourlives": 37541, + "ouro": 41224, + "ours": 1491, + "ourse": 15415, + "ourselves": 10124, + "ourt": 22960, + "oury": 29484, + "ous": 1987, + "ous": 879, + "ouse": 32048, + "ouse": 7603, + "ouses": 33666, + "ously": 2501, + "ousness": 10689, + "ousy": 28302, + "out": 1130, + "out": 620, + "outa": 35187, + "outage": 27320, + "outages": 40353, + "outback": 28532, + "outbound": 41256, + "outbreak": 20103, + "outcome": 16552, + "outcomes": 14016, + "outdated": 38313, + "outdoor": 19184, + "outdoor": 6368, + "outdoors": 10469, + "oute": 44180, + "outed": 34435, + "outer": 30499, + "outer": 14188, + "outes": 39600, + "outfield": 41826, + "outfit": 6525, + "outfits": 16366, + "outfitters": 37725, + "outfy": 34920, + "outgoing": 27302, + "outh": 16933, + "outh": 8111, + "outine": 35452, + "outing": 11251, + "outlander": 45820, + "outlander": 17095, + "outlaw": 37498, + "outlaw": 27340, + "outlaws": 30935, + "outlet": 16855, + "outlets": 20822, + "outline": 26894, + "outlines": 29159, + "outlining": 45960, + "outlook": 12983, + "outof": 43958, + "outpatient": 46603, + "outpost": 44622, + "output": 17255, + "outra": 14262, 
+ "outrage": 23577, + "outraged": 43402, + "outrageous": 29342, + "outre": 14373, + "outreach": 15297, + "outright": 38200, + "outs": 5790, + "outsi": 22515, + "outside": 47693, + "outside": 2782, + "outsider": 41196, + "outsiders": 41742, + "outskirts": 42088, + "outsourcing": 34543, + "outstanding": 6387, + "outta": 15807, + "outtuesday": 48692, + "outw": 34650, + "oux": 40960, + "oux": 14228, + "ov": 6420, + "ov": 8479, + "ova": 12762, + "oval": 15039, + "ovarian": 42913, + "ovation": 24333, + "ove": 8649, + "ove": 15456, + "oven": 44620, + "oven": 12579, + "over": 1658, + "over": 962, + "overall": 6914, + "overboard": 42982, + "overcame": 47235, + "overcast": 36942, + "overcome": 14365, + "overcoming": 29348, + "overdose": 27017, + "overdrive": 40088, + "overdue": 30240, + "overflow": 32885, + "overflowing": 45370, + "overhaul": 31531, + "overhead": 20321, + "overland": 38808, + "overlay": 44827, + "overload": 24327, + "overlook": 35767, + "overlooked": 27632, + "overlooking": 17319, + "overly": 28820, + "overnight": 9913, + "overpass": 44310, + "overrated": 38214, + "overs": 45774, + "overs": 17329, + "overseas": 15100, + "oversight": 32494, + "oversized": 31557, + "overtime": 19347, + "overturned": 31048, + "overview": 14789, + "overwatch": 18124, + "overweight": 43465, + "overwhel": 12204, + "overwhelmed": 23459, + "overwhelming": 20306, + "overwhelmingly": 43549, + "ovi": 32508, + "ovic": 22417, + "ovich": 27623, + "ovie": 47677, + "ovo": 41920, + "ovo": 18065, + "ovski": 26167, + "ow": 2032, + "ow": 2250, + "owa": 32770, + "owe": 19073, + "owed": 37641, + "owen": 24838, + "owen": 12056, + "owens": 20664, + "owes": 35069, + "owing": 48582, + "owl": 34332, + "owl": 9899, + "owls": 18247, + "own": 3845, + "own": 1758, + "owned": 8536, + "owner": 5019, + "owners": 7712, + "ownership": 16583, + "owning": 24661, + "owns": 17533, + "owo": 46142, + "ows": 27423, + "owski": 22573, + "ox": 3282, + "ox": 12071, + "oxfam": 45466, + "oxford": 28588, + "oxford": 8824, + "oxfordshire": 37855, + "oxi": 33731, + "oxi": 48147, + "oxid": 17701, + "oxide": 28235, + "oxo": 37088, + "oxy": 12432, + "oxygen": 16214, + "oy": 6638, + "oy": 12437, + "oya": 38894, + "oye": 48677, + "oyster": 40545, + "oyster": 17253, + "oysters": 22672, + "oz": 10584, + "oz": 6044, + "ozar": 31848, + "ozil": 41365, + "ozone": 37052, + "ozzy": 39549, + "p": 79, + "p": 335, + "pa": 765, + "pa": 2217, + "paa": 32812, + "pab": 9354, + "pablo": 42172, + "pablo": 14473, + "pac": 2332, + "pac": 7608, + "pace": 40600, + "pace": 9450, + "paced": 32611, + "pacers": 23976, + "paces": 43001, + "paci": 5699, + "pacific": 19723, + "pacific": 6654, + "pacing": 45202, + "pack": 2711, + "pack": 3420, + "package": 7053, + "packaged": 29656, + "packages": 14305, + "packaging": 11658, + "packard": 46421, + "packed": 5883, + "packer": 28209, + "packers": 14294, + "packet": 25022, + "packets": 40448, + "packing": 9829, + "packs": 11086, + "paco": 41364, + "pacqui": 28456, + "pacquiao": 30485, + "pact": 27182, + "pad": 3798, + "pad": 7601, + "padded": 42253, + "paddington": 33162, + "paddle": 38276, + "paddle": 20811, + "paddling": 40645, + "paddock": 29590, + "paddy": 33103, + "paddy": 19855, + "padi": 47037, + "padilla": 22380, + "padma": 44595, + "padma": 46457, + "padre": 38343, + "padres": 22829, + "pads": 17353, + "paedi": 41488, + "paella": 46924, + "paf": 47185, + "pafc": 49259, + "pag": 4151, + "pag": 30525, + "pagan": 27854, + "page": 14996, + "page": 2504, + "pageant": 22139, + "pages": 8082, + "pagoda": 44309, + "pah": 41054, + "pah": 
26884, + "pai": 20624, + "pai": 21198, + "paid": 5057, + "paige": 33659, + "paige": 16022, + "paign": 31796, + "pain": 2141, + "pain": 4495, + "paine": 38069, + "painful": 16361, + "pains": 25639, + "paint": 7948, + "paint": 5185, + "paintball": 39730, + "painted": 6433, + "painter": 10888, + "painters": 35703, + "painting": 49164, + "painting": 3086, + "paintings": 9956, + "paints": 21672, + "pair": 19848, + "pair": 4038, + "paired": 12433, + "pairing": 16313, + "pairings": 41152, + "pairs": 9950, + "pais": 16878, + "paisley": 22954, + "pajam": 24110, + "pajama": 40244, + "pajamas": 37231, + "pak": 13186, + "pak": 9094, + "paki": 3438, + "pakistan": 10713, + "pakistan": 3994, + "pakistani": 14050, + "pakistanis": 45707, + "pakv": 38196, + "pal": 1850, + "pal": 3611, + "pala": 17895, + "palace": 6381, + "palaces": 45625, + "palad": 28371, + "palae": 43379, + "palais": 35673, + "palate": 34666, + "palawan": 48202, + "palazzo": 36006, + "pale": 4768, + "pale": 12518, + "paleo": 36741, + "paleo": 22198, + "paler": 38028, + "palermo": 40635, + "palestin": 9449, + "palestine": 11682, + "palestinian": 11764, + "palestinians": 21874, + "palette": 13901, + "pali": 48063, + "palin": 40153, + "palis": 44256, + "pality": 27296, + "pall": 35817, + "palla": 21208, + "palladium": 37888, + "pallet": 39057, + "palli": 28954, + "palliative": 46014, + "pally": 46073, + "palm": 19651, + "palm": 8612, + "palma": 29888, + "palmer": 40112, + "palmer": 13633, + "palms": 27059, + "palo": 31562, + "palom": 47698, + "palooza": 25861, + "pals": 11043, + "palsy": 46651, + "pam": 8228, + "pam": 18513, + "pamela": 26991, + "pamp": 37653, + "pamper": 44345, + "pamph": 41332, + "pan": 1072, + "pan": 7437, + "panam": 24606, + "panama": 15522, + "panas": 26207, + "panasonic": 29750, + "pancake": 18723, + "pancakes": 15308, + "panch": 27251, + "pancra": 42472, + "pancre": 27708, + "pancreatic": 49337, + "pancy": 41625, + "pand": 5631, + "panda": 12952, + "pandas": 35119, + "pande": 38419, + "pandey": 34895, + "pandit": 41191, + "pandor": 30250, + "pandora": 17727, + "pandoramusic": 42344, + "pane": 27470, + "panel": 3724, + "paneli": 19410, + "panelist": 39719, + "panelists": 24619, + "panels": 12735, + "panera": 48471, + "pang": 16756, + "pang": 23672, + "panhandle": 40919, + "pani": 36092, + "panic": 46671, + "panic": 14124, + "panini": 30410, + "pann": 42302, + "panna": 49065, + "pano": 36165, + "panor": 12962, + "panorama": 19763, + "panoramic": 22563, + "pans": 35204, + "pant": 22550, + "panther": 22825, + "panther": 13262, + "panthers": 10494, + "panties": 32515, + "panto": 28776, + "pantry": 25608, + "pants": 5003, + "panty": 44217, + "pany": 45567, + "panzer": 41159, + "pao": 33790, + "paola": 44689, + "paolo": 48488, + "paolo": 21133, + "pap": 1884, + "pap": 30756, + "papa": 12211, + "papar": 32782, + "paparazzi": 37842, + "papaya": 44098, + "paper": 8680, + "paper": 2802, + "paperback": 17928, + "papers": 8204, + "paperwork": 35785, + "papi": 35177, + "papp": 26361, + "paprika": 44793, + "papua": 32629, + "par": 699, + "par": 9163, + "para": 18355, + "para": 8976, + "parach": 23147, + "parachute": 30122, + "parad": 37143, + "parade": 5809, + "parades": 46479, + "paradi": 6658, + "paradig": 27786, + "paradigm": 33485, + "paradise": 45869, + "paradise": 7247, + "paradox": 33109, + "parag": 11866, + "paragon": 48099, + "paragra": 24903, + "paragraph": 28499, + "paragu": 38021, + "paraguay": 43579, + "paral": 15143, + "paralle": 13184, + "parallel": 18201, + "paralleled": 42520, + "parallels": 46101, + "paraly": 
30255, + "paralym": 18727, + "paralympic": 30806, + "paralympics": 37162, + "paralysis": 45702, + "param": 12250, + "parame": 27106, + "paramedic": 34630, + "paramedics": 35991, + "parameters": 44890, + "paramore": 34401, + "paramount": 26642, + "parano": 30283, + "paranoid": 43029, + "paranor": 16940, + "paranormal": 19047, + "parap": 41091, + "paras": 15198, + "parasite": 42460, + "parasites": 46175, + "parc": 30914, + "parcel": 30367, + "parcels": 45589, + "pard": 18773, + "pardon": 47606, + "pardon": 26565, + "pare": 18202, + "pared": 5498, + "paren": 3106, + "parent": 47848, + "parent": 10183, + "parental": 28339, + "parenthood": 23887, + "parenting": 14529, + "parents": 3731, + "pares": 12420, + "parfait": 46140, + "pari": 17961, + "pari": 27979, + "paris": 13982, + "paris": 3445, + "parisagreement": 47405, + "parish": 47328, + "parish": 13020, + "parisi": 45081, + "parisian": 38512, + "parity": 42734, + "park": 4985, + "park": 1452, + "parked": 16487, + "parker": 31119, + "parker": 8365, + "parkin": 34868, + "parking": 5984, + "parkinson": 28129, + "parkland": 31287, + "parkrun": 25747, + "parks": 6873, + "parkway": 19882, + "parl": 30373, + "parl": 29897, + "parliam": 5941, + "parliament": 41599, + "parliament": 7151, + "parliamentary": 17912, + "parlor": 38253, + "parlour": 37829, + "parma": 36077, + "parme": 26295, + "parmesan": 27274, + "paro": 17429, + "parody": 24318, + "parole": 32158, + "parr": 44113, + "parrish": 43043, + "parrot": 23565, + "parry": 40604, + "parsley": 30077, + "parsons": 22505, + "part": 1872, + "part": 1551, + "parte": 48508, + "parth": 34790, + "parti": 10509, + "partial": 18957, + "partially": 21269, + "partic": 2871, + "partici": 9540, + "particip": 4400, + "participant": 27674, + "participants": 10237, + "participate": 9433, + "participated": 14252, + "participates": 46414, + "participating": 11535, + "participation": 13529, + "particle": 27716, + "particles": 27012, + "particul": 11408, + "particular": 14098, + "particularly": 12170, + "parties": 9032, + "parting": 32844, + "partisan": 20772, + "partist": 44713, + "partition": 42219, + "partly": 21459, + "partner": 5210, + "partner": 4568, + "partnered": 21402, + "partnering": 21182, + "partners": 5568, + "partnership": 6123, + "partnerships": 17418, + "parton": 43245, + "partridge": 34872, + "parts": 5149, + "party": 12877, + "party": 1580, + "partying": 25702, + "pas": 1341, + "pas": 9525, + "pasadena": 25892, + "pascal": 28626, + "pasco": 49220, + "pascu": 42692, + "pash": 23936, + "pasha": 46986, + "paso": 18542, + "pasqu": 44941, + "pass": 5016, + "pass": 3511, + "passage": 16477, + "passages": 48937, + "passed": 4957, + "passenger": 12311, + "passengers": 12781, + "passer": 48544, + "passes": 7633, + "passi": 32471, + "passing": 6589, + "passion": 8822, + "passion": 5332, + "passionate": 10947, + "passionately": 44028, + "passions": 38441, + "passive": 23171, + "passover": 38426, + "passport": 14739, + "passports": 46368, + "password": 20258, + "passwords": 43095, + "past": 7315, + "past": 2729, + "pasta": 10441, + "paste": 34765, + "paste": 17038, + "pastel": 19457, + "pastels": 45699, + "pastor": 19792, + "pastor": 9664, + "pastoral": 37191, + "pastors": 30959, + "pastr": 45478, + "pastries": 39409, + "pastry": 18582, + "pasture": 34764, + "pastures": 47793, + "pat": 1300, + "pat": 7036, + "patag": 29862, + "patagonia": 32786, + "patch": 29284, + "patch": 8721, + "patches": 22104, + "patchwork": 44675, + "patchy": 47488, + "pate": 42122, + "pate": 42098, + "patel": 14168, + "patent": 14692, + 
"patented": 37277, + "patents": 33911, + "paterson": 36560, + "path": 7408, + "path": 5035, + "pathetic": 18222, + "pathfinder": 35415, + "pathi": 34976, + "pathi": 27347, + "pathic": 49025, + "patho": 18534, + "pathology": 23290, + "paths": 16333, + "pathway": 23488, + "pathways": 24690, + "pathy": 13330, + "pati": 2799, + "pati": 26708, + "patience": 13575, + "patient": 30139, + "patient": 6262, + "patiently": 22980, + "patients": 5543, + "patil": 49187, + "patio": 14304, + "pational": 30627, + "patna": 45025, + "patory": 41859, + "patreon": 17165, + "patri": 4771, + "patriarch": 49054, + "patriarchy": 48806, + "patric": 12569, + "patrice": 40731, + "patricia": 18143, + "patrick": 12078, + "patrick": 5286, + "patricks": 46783, + "patriot": 28896, + "patriot": 15692, + "patrioti": 35520, + "patriotic": 20217, + "patriotism": 35807, + "patriots": 8707, + "patro": 31650, + "patrol": 10073, + "patrolling": 39344, + "patrols": 35978, + "patron": 26658, + "patron": 17683, + "patrons": 28308, + "pats": 24874, + "patsy": 46093, + "patt": 12637, + "patter": 4982, + "pattern": 7447, + "patterned": 47212, + "patterns": 11637, + "patterson": 21384, + "patti": 44927, + "patti": 26123, + "pattinson": 32474, + "patton": 29026, + "patty": 48741, + "patty": 18321, + "pau": 1834, + "pau": 35970, + "paul": 6035, + "paul": 2597, + "paula": 37363, + "paula": 16777, + "pauline": 30438, + "paulo": 48002, + "paulo": 21628, + "pauls": 41413, + "pauls": 40010, + "paulson": 48201, + "pause": 19439, + "paused": 46782, + "pav": 6661, + "pave": 37107, + "paved": 27898, + "pavel": 43152, + "pavement": 27669, + "pavilion": 13374, + "paving": 28651, + "paw": 14009, + "paw": 16016, + "pawan": 29754, + "pawankalyan": 33702, + "pawn": 43195, + "paws": 16714, + "pax": 20007, + "pax": 19033, + "paxton": 38347, + "pay": 2642, + "pay": 3345, + "payback": 36413, + "paycheck": 45078, + "payday": 26957, + "payee": 46985, + "payer": 41503, + "paying": 8341, + "payment": 10596, + "payments": 11832, + "payne": 12775, + "paypal": 21442, + "payroll": 31610, + "pays": 10845, + "paysoff": 48174, + "paytm": 45352, + "payton": 27348, + "paz": 22267, + "pb": 20112, + "pb": 10981, + "pba": 28205, + "pbb": 48567, + "pbb": 40589, + "pbc": 49191, + "pbl": 35166, + "pbr": 32998, + "pbs": 17908, + "pc": 6782, + "pc": 3808, + "pca": 35705, + "pcb": 26235, + "pcc": 36059, + "pci": 38957, + "pcm": 47436, + "pcr": 35704, + "pcs": 11917, + "pcso": 31963, + "pct": 22168, + "pd": 4387, + "pd": 4675, + "pdates": 16842, + "pdc": 40498, + "pdf": 15181, + "pdp": 24601, + "pdt": 21743, + "pdx": 25470, + "pdx": 16153, + "pe": 661, + "pe": 956, + "pea": 13915, + "peabo": 34083, + "peabody": 41244, + "peac": 34615, + "peace": 6249, + "peace": 3021, + "peaceful": 9461, + "peacefully": 30530, + "peacekeeping": 43630, + "peach": 10522, + "peach": 11538, + "peaches": 27216, + "peak": 18572, + "peak": 6026, + "peakdistrict": 41289, + "peake": 24810, + "peaked": 36391, + "peaks": 14067, + "pean": 11563, + "peanu": 25843, + "peanut": 12491, + "peanuts": 26503, + "pear": 4910, + "pear": 18820, + "pearce": 25996, + "pearl": 21806, + "pearl": 8560, + "pearljam": 46739, + "pearls": 19581, + "pears": 39565, + "pearson": 20461, + "peas": 15937, + "peasant": 40621, + "peasants": 48788, + "peat": 26914, + "pebble": 28056, + "pebbles": 40155, + "pec": 32447, + "pec": 17611, + "pecan": 32177, + "peck": 25186, + "peck": 29234, + "pecker": 30169, + "peckham": 45863, + "pecu": 34200, + "peculiar": 42808, + "ped": 13197, + "ped": 2966, + "pedago": 34590, + "pedagogy": 48072, + 
"pedal": 32943, + "pedal": 19621, + "pedals": 38535, + "pede": 12862, + "pede": 19560, + "pedestri": 30027, + "pedestrian": 18256, + "pedestrians": 33895, + "pedi": 12967, + "pedia": 11733, + "pediatric": 48431, + "pediatric": 22071, + "pedic": 35319, + "pedic": 44528, + "pedro": 29963, + "pedro": 15114, + "peds": 45377, + "pee": 12988, + "pee": 11196, + "peed": 47369, + "peek": 46323, + "peek": 7569, + "peeking": 48771, + "peel": 34386, + "peel": 17158, + "peeled": 33533, + "peeling": 48649, + "peep": 25425, + "peep": 16857, + "peeps": 11681, + "peer": 32416, + "peer": 14432, + "peers": 21626, + "pees": 31830, + "peg": 32182, + "peg": 11207, + "pegas": 30018, + "pegasus": 37822, + "peggy": 24271, + "pei": 48166, + "pei": 12917, + "pel": 4286, + "pel": 7006, + "pele": 44105, + "pelican": 34131, + "pelicans": 29363, + "pell": 46981, + "pelle": 31267, + "pelled": 32506, + "pellegr": 38529, + "pellets": 48240, + "pelo": 40192, + "pelo": 40238, + "pelosi": 22169, + "pelvic": 45646, + "pemb": 19880, + "pembro": 24084, + "pembroke": 36702, + "pembroke": 40044, + "pembrokeshire": 40695, + "pen": 1501, + "pen": 5356, + "pena": 35788, + "penalties": 25417, + "penalty": 11491, + "penang": 29545, + "penc": 20065, + "pence": 18002, + "pencil": 41303, + "pencil": 11200, + "pencils": 21909, + "pend": 3052, + "pendant": 12415, + "pendants": 44117, + "pending": 12770, + "pendleton": 44272, + "pendu": 45336, + "penelope": 36703, + "penetr": 26058, + "peng": 42955, + "peng": 39200, + "pengu": 8854, + "penguin": 28249, + "penguin": 14952, + "penguins": 16557, + "peninsu": 13464, + "peninsula": 14070, + "penn": 7760, + "penn": 11128, + "pennant": 43971, + "penned": 45077, + "penney": 47856, + "pennies": 43094, + "pennsylvania": 13673, + "penny": 20400, + "penny": 11388, + "pens": 13307, + "pens": 13310, + "pensac": 30925, + "pensacola": 33573, + "pension": 32840, + "pension": 17764, + "pensions": 29773, + "penske": 47154, + "pent": 10699, + "pent": 22725, + "pentagon": 23133, + "pente": 33165, + "penthouse": 32673, + "penultimate": 36553, + "peop": 1030, + "people": 10573, + "people": 1047, + "peoples": 28241, + "peoples": 14627, + "peopleschoice": 32418, + "peoplesvote": 45830, + "peoria": 36985, + "pep": 12761, + "pep": 14898, + "pepe": 24778, + "pepp": 34425, + "pepper": 14861, + "pepper": 8253, + "peppermint": 30321, + "pepperoni": 47307, + "peppers": 14650, + "pepsi": 21307, + "per": 703, + "per": 1284, + "pera": 26294, + "perce": 24135, + "perceived": 38436, + "percent": 16328, + "percent": 9017, + "percentage": 19477, + "percep": 28017, + "perception": 20591, + "perceptions": 38138, + "perch": 34281, + "perched": 40071, + "percu": 41722, + "percussion": 23980, + "percy": 23940, + "pere": 8665, + "pere": 36300, + "pered": 24509, + "peregr": 37479, + "peregrine": 44546, + "pereira": 43927, + "peren": 24564, + "perenni": 26996, + "perennial": 34038, + "perez": 15107, + "perf": 22816, + "perfe": 1624, + "perfec": 6599, + "perfect": 17261, + "perfect": 1878, + "perfection": 9646, + "perfectly": 8037, + "perfecto": 42898, + "perfor": 2311, + "perform": 3866, + "perform": 5940, + "performan": 8973, + "performance": 2714, + "performances": 9553, + "performed": 9997, + "performer": 17061, + "performers": 18476, + "performing": 5170, + "performs": 13839, + "perfu": 14214, + "perfume": 17525, + "perhaps": 9297, + "peri": 12618, + "peri": 44068, + "perience": 19302, + "peril": 40119, + "peril": 48301, + "perimeter": 38499, + "pering": 29746, + "perio": 5101, + "period": 6131, + "periodic": 36476, + "periods": 
24401, + "periph": 35308, + "peripheral": 43901, + "peris": 19461, + "periscope": 21668, + "perk": 33424, + "perkins": 20057, + "perks": 17660, + "perl": 44018, + "perm": 47847, + "perman": 9018, + "permanent": 11144, + "permanently": 25584, + "perme": 42456, + "permission": 15822, + "permit": 21950, + "permits": 33267, + "permitted": 44380, + "pero": 23551, + "perpe": 15749, + "perpetr": 33376, + "perpetu": 30132, + "perpetual": 32018, + "perrie": 32691, + "perry": 28478, + "perry": 7899, + "pers": 3688, + "pers": 10710, + "perse": 27498, + "persecu": 22878, + "persecution": 32009, + "perseverance": 29820, + "persi": 11509, + "persian": 19859, + "persist": 19412, + "persist": 40938, + "persistence": 34588, + "persistent": 29028, + "person": 3510, + "person": 2533, + "persona": 18401, + "personal": 10114, + "personal": 4121, + "personalised": 24186, + "personalities": 27888, + "personality": 10386, + "personalized": 17845, + "personally": 13885, + "personnel": 14546, + "persons": 14592, + "perspec": 17997, + "perspective": 8996, + "perspectives": 18777, + "persu": 20972, + "pert": 36970, + "pert": 16306, + "perth": 19067, + "perth": 11011, + "peru": 20612, + "peru": 12964, + "peruvian": 30822, + "pes": 38368, + "pes": 2598, + "pesa": 47409, + "pesc": 44044, + "pesh": 33184, + "peshaw": 28524, + "peshawar": 29230, + "pesky": 42512, + "pesos": 47872, + "pessi": 43902, + "pest": 20130, + "pest": 9425, + "pesticide": 48481, + "pesticides": 37868, + "pesto": 26186, + "pests": 41919, + "pet": 2167, + "pet": 3703, + "peta": 28785, + "petal": 38430, + "petal": 40469, + "petals": 26064, + "petday": 45314, + "pete": 14479, + "pete": 8571, + "peter": 5093, + "peter": 3696, + "peterborough": 26012, + "peters": 16336, + "petersburg": 21052, + "petersen": 39794, + "peterson": 16877, + "peth": 48920, + "petit": 36437, + "petit": 21276, + "petite": 27213, + "petition": 10975, + "petitions": 43536, + "petr": 29808, + "petra": 31300, + "petre": 47179, + "petri": 31831, + "petro": 8716, + "petrol": 18149, + "petroleum": 22063, + "petron": 42875, + "pets": 7663, + "pett": 27051, + "petti": 48001, + "petting": 44334, + "petty": 17324, + "peu": 21411, + "peuge": 22893, + "peugeot": 24129, + "pew": 21608, + "pew": 30783, + "pewdie": 41882, + "pewdiepie": 42563, + "pex": 43765, + "pey": 14966, + "pey": 30933, + "peyton": 49254, + "peyton": 20307, + "pez": 45798, + "pez": 10482, + "pf": 16680, + "pf": 12572, + "pfa": 47839, + "pfc": 35007, + "pff": 44121, + "pfi": 29810, + "pfw": 31229, + "pg": 12476, + "pg": 5211, + "pga": 13351, + "pgat": 36514, + "pgatour": 40094, + "pgh": 44862, + "pgh": 30031, + "pgs": 49204, + "ph": 745, + "ph": 2042, + "pha": 4443, + "pha": 26255, + "phal": 19962, + "phan": 8731, + "phan": 40126, + "phant": 36998, + "phantom": 37688, + "phantom": 14490, + "phar": 5570, + "phara": 35792, + "pharaoh": 40437, + "pharm": 45761, + "pharma": 17831, + "pharmac": 8193, + "pharmaceu": 19490, + "pharmaceutical": 25217, + "pharmaceuticals": 44623, + "pharmacist": 41024, + "pharmacists": 44337, + "pharmacy": 15293, + "pharo": 42308, + "pharoah": 49287, + "pharrell": 31316, + "phase": 8304, + "phases": 35337, + "phat": 42492, + "phc": 41102, + "phd": 20875, + "phd": 8472, + "phdchat": 39564, + "phdlife": 39638, + "phe": 4787, + "phe": 19853, + "pheasant": 41983, + "phee": 41292, + "phel": 23711, + "phelps": 27128, + "phen": 7718, + "pheno": 47336, + "phenom": 31673, + "phenom": 39618, + "phenomen": 11304, + "phenomena": 41538, + "phenomenal": 15035, + "phenomenon": 24464, + "pher": 9194, + "pher": 
19828, + "phers": 29531, + "pherson": 36421, + "phew": 10295, + "phi": 2239, + "phi": 12220, + "phia": 9228, + "phic": 3977, + "phie": 30237, + "phies": 17062, + "phil": 2821, + "phil": 6199, + "phila": 47443, + "philadel": 9428, + "philadelphia": 9749, + "philanthro": 16587, + "philanthropist": 44153, + "philanthropy": 25047, + "philately": 33695, + "phile": 36543, + "philharmon": 25228, + "philharmonic": 31699, + "phili": 4277, + "philia": 46654, + "philip": 20748, + "philip": 11074, + "philipp": 5623, + "philipp": 47591, + "philippe": 20942, + "philippine": 17629, + "philippines": 8149, + "philips": 25175, + "phill": 42346, + "phill": 48272, + "philli": 6456, + "phillies": 18748, + "phillip": 48832, + "phillip": 19323, + "phillips": 11041, + "philly": 19545, + "philly": 7785, + "philos": 8395, + "philosop": 20349, + "philosoph": 10187, + "philosopher": 25220, + "philosophical": 32628, + "philosophy": 12213, + "phils": 38573, + "phin": 33816, + "phine": 40985, + "phins": 40210, + "phish": 36897, + "phishing": 36546, + "phl": 25603, + "pho": 816, + "pho": 22707, + "phobia": 28749, + "phoe": 22673, + "phoebe": 27582, + "phoeni": 6778, + "phoenix": 20615, + "phoenix": 7793, + "phol": 48140, + "phon": 19602, + "phon": 31115, + "phone": 15486, + "phone": 1951, + "phones": 6351, + "phony": 31925, + "phora": 31363, + "phosp": 22638, + "photo": 1153, + "photo": 1125, + "photobomb": 37075, + "photobook": 41894, + "photog": 28115, + "photogenic": 36108, + "photogra": 36754, + "photograph": 1688, + "photograph": 8853, + "photographed": 11573, + "photographer": 5748, + "photographers": 17141, + "photographic": 22053, + "photographing": 30074, + "photographs": 15759, + "photography": 33183, + "photography": 2108, + "photom": 32223, + "photoo": 11106, + "photooftheday": 11933, + "photos": 2479, + "photoshoot": 11121, + "photoshop": 12419, + "photoshopped": 35738, + "phouse": 27848, + "php": 17370, + "phra": 12777, + "phrase": 18809, + "phrases": 35264, + "phs": 16495, + "phu": 21274, + "phuket": 34028, + "phx": 35466, + "phx": 29507, + "phy": 6484, + "phy": 4292, + "phyl": 35600, + "phyllis": 37844, + "phys": 3734, + "phys": 37894, + "physi": 13782, + "physic": 46641, + "physical": 44127, + "physical": 6671, + "physically": 18105, + "physician": 21055, + "physicians": 26702, + "physicist": 29052, + "physics": 9369, + "physio": 29574, + "physio": 29177, + "physiology": 32349, + "physique": 42884, + "phyto": 42197, + "pi": 741, + "pi": 5357, + "pia": 8918, + "pian": 24637, + "pianist": 21048, + "piano": 49278, + "piano": 7894, + "pianos": 47904, + "piazza": 28496, + "pic": 901, + "pic": 1282, + "pical": 5482, + "picard": 48507, + "picasso": 21481, + "piccad": 33876, + "piccadilly": 37287, + "piccollage": 43621, + "pick": 6379, + "pick": 3142, + "picked": 6018, + "picker": 43105, + "pickering": 47605, + "picket": 33559, + "picking": 9545, + "pickle": 24570, + "pickled": 21705, + "pickles": 25001, + "picks": 8551, + "pickup": 15382, + "pickups": 33383, + "picnic": 12007, + "pico": 23363, + "picoftheday": 18319, + "pics": 2559, + "pict": 18778, + "pictorial": 40640, + "picture": 11663, + "picture": 1674, + "pictured": 7647, + "pictures": 3646, + "picturesque": 24894, + "pid": 5225, + "piday": 48056, + "pie": 12065, + "pie": 5319, + "piece": 39632, + "piece": 2754, + "pieces": 6194, + "pied": 24686, + "pied": 12713, + "piedmont": 39691, + "pier": 5641, + "pier": 11348, + "pierc": 49216, + "pierce": 48462, + "pierce": 16782, + "pierced": 32799, + "piercing": 22557, + "piero": 43125, + "pierre": 34670, + 
"pierre": 11985, + "piers": 29030, + "pies": 6898, + "pieter": 44801, + "pietro": 42169, + "piff": 40719, + "pig": 12009, + "pig": 9619, + "pigeon": 18008, + "pigeons": 32910, + "piggy": 28245, + "pigment": 40284, + "pigs": 16228, + "pik": 48539, + "pika": 47372, + "pikach": 27268, + "pikachu": 28107, + "pike": 33457, + "pike": 14011, + "pil": 2893, + "pil": 20645, + "pilates": 29518, + "pile": 44403, + "pile": 13930, + "piled": 26873, + "piles": 31968, + "pilgri": 13966, + "pilgrim": 32662, + "pilgrimage": 24335, + "pilgrims": 31370, + "piling": 43050, + "pilip": 27234, + "pilipinas": 32392, + "pill": 14830, + "pill": 19226, + "pillar": 17322, + "pillars": 22054, + "pillow": 42237, + "pillow": 12182, + "pillows": 26499, + "pills": 23964, + "pilo": 37526, + "pilot": 31619, + "pilot": 6687, + "pilots": 15586, + "pilsner": 47153, + "pim": 15285, + "pim": 35472, + "pimp": 35789, + "pin": 2629, + "pin": 5164, + "pinball": 31679, + "pinch": 26114, + "pine": 9398, + "pine": 7374, + "pineapple": 14831, + "pines": 20338, + "ping": 23720, + "ping": 2089, + "pinion": 40557, + "pink": 11151, + "pink": 3360, + "pinkfloyd": 48520, + "pinky": 29803, + "pinn": 31448, + "pinnacle": 32754, + "pinned": 12165, + "pinning": 44515, + "pino": 36633, + "pinot": 41399, + "pinot": 21146, + "pinoy": 43578, + "pinoy": 35258, + "pins": 14619, + "pinst": 41173, + "pint": 42537, + "pint": 13584, + "pinterest": 15379, + "pinto": 35992, + "pints": 27935, + "pinup": 37349, + "pio": 22108, + "pion": 36728, + "pion": 29190, + "pione": 7975, + "pioneer": 34892, + "pioneer": 12459, + "pioneering": 25933, + "pioneers": 22383, + "pious": 42441, + "pip": 30854, + "pipe": 29333, + "pipe": 10459, + "pipel": 12387, + "pipeline": 14151, + "pipelines": 39683, + "piper": 47052, + "piper": 16293, + "pipes": 16991, + "piping": 40744, + "pippa": 47672, + "pir": 4351, + "pir": 38899, + "piracy": 39452, + "piran": 49034, + "pirate": 38680, + "pirate": 13592, + "pirates": 10442, + "pire": 16613, + "pires": 14988, + "pis": 9230, + "pis": 44441, + "pisa": 43632, + "pisces": 45982, + "piss": 20818, + "pissed": 17989, + "pist": 15556, + "pist": 32826, + "pistachi": 29760, + "pistachio": 36320, + "pistol": 20480, + "piston": 48236, + "pistons": 27242, + "pistor": 48162, + "pit": 2946, + "pit": 7476, + "pita": 27070, + "pitbull": 25295, + "pitch": 8992, + "pitch": 5872, + "pitched": 28447, + "pitcher": 13445, + "pitchers": 27835, + "pitches": 21005, + "pitching": 16455, + "piti": 47568, + "pits": 24144, + "pitt": 7607, + "pitt": 15599, + "pitts": 9531, + "pittsburgh": 10453, + "pity": 24380, + "pius": 39988, + "pivo": 18009, + "pivot": 31805, + "pivotal": 31432, + "pix": 6185, + "pix": 13088, + "pixar": 27493, + "pixel": 14384, + "pixel": 13241, + "pixelart": 18516, + "pixels": 34099, + "pixie": 35573, + "piyu": 30772, + "piyush": 36191, + "piyushgoyal": 45318, + "pizz": 3897, + "pizza": 4474, + "pizzas": 30647, + "pizzeria": 44174, + "pj": 12524, + "pj": 17179, + "pjnet": 22011, + "pjs": 36009, + "pk": 10149, + "pk": 10991, + "pkg": 49011, + "pkk": 47480, + "pknot": 41779, + "pkwy": 36827, + "pl": 712, + "pl": 5678, + "pla": 841, + "pla": 19945, + "plac": 2331, + "place": 14884, + "place": 1445, + "placed": 9729, + "placement": 16724, + "placements": 43885, + "placer": 49170, + "places": 4448, + "placing": 18531, + "plague": 25360, + "plaid": 23291, + "plain": 22776, + "plain": 10709, + "plains": 16345, + "plan": 1740, + "plan": 2970, + "pland": 24801, + "plane": 22728, + "plane": 5363, + "planes": 12581, + "planet": 16833, + "planet": 5172, + 
"planetary": 28361, + "planets": 22315, + "plank": 30991, + "plankton": 48249, + "plann": 6409, + "planned": 8169, + "planner": 18083, + "planners": 33664, + "planning": 4446, + "plano": 34063, + "plans": 4181, + "plant": 8521, + "plant": 3912, + "plantation": 20014, + "plantbased": 33720, + "planted": 14286, + "planter": 34453, + "planters": 43661, + "planting": 13922, + "plants": 5829, + "plaque": 16097, + "plaques": 45610, + "plar": 26754, + "plas": 45673, + "plasma": 24999, + "plaster": 31980, + "plastic": 15645, + "plastic": 6102, + "plasticpollution": 47129, + "plastics": 20999, + "plasticsurgery": 48555, + "plat": 3172, + "plata": 46456, + "plate": 28744, + "plate": 5135, + "plateau": 29301, + "plated": 21161, + "plates": 11485, + "platform": 5549, + "platforms": 13551, + "platin": 10267, + "plating": 44564, + "platinum": 10979, + "plato": 41101, + "platoon": 41254, + "platt": 44459, + "platt": 40097, + "platte": 46785, + "platter": 29071, + "platz": 40878, + "plau": 39139, + "play": 1222, + "play": 1453, + "playa": 23756, + "playable": 33885, + "playback": 39194, + "playbook": 34856, + "playboy": 24383, + "played": 3432, + "player": 24503, + "player": 2477, + "players": 3030, + "playful": 23871, + "playground": 15861, + "playhouse": 23254, + "playin": 24674, + "playing": 47368, + "playing": 1629, + "playlist": 9180, + "playlists": 47183, + "playo": 5804, + "playoff": 9655, + "playoffs": 9548, + "plays": 5134, + "playstation": 11332, + "playtime": 43037, + "playwright": 32070, + "plaza": 8943, + "plc": 16827, + "ple": 926, + "ple": 1619, + "plea": 21956, + "plead": 47539, + "pleads": 31425, + "plear": 21362, + "pleas": 8481, + "pleas": 48740, + "pleasant": 12271, + "please": 41074, + "please": 1474, + "pleased": 6107, + "pleasing": 32893, + "pleasure": 5854, + "pleasures": 29513, + "pledge": 11507, + "pledged": 36799, + "pledges": 26746, + "pledis": 41202, + "plein": 43429, + "plenary": 19891, + "plenty": 7524, + "pler": 17677, + "ples": 6248, + "pless": 39821, + "pless": 17059, + "plets": 43230, + "plex": 23765, + "plex": 15241, + "pley": 19543, + "pli": 30001, + "pli": 45797, + "plic": 5806, + "plicity": 19823, + "plight": 40317, + "plin": 44531, + "plin": 32335, + "pline": 25376, + "pling": 12899, + "plings": 31184, + "pll": 47629, + "pll": 25266, + "pln": 48755, + "plo": 1778, + "plo": 43523, + "plor": 34695, + "plot": 9918, + "plots": 25672, + "plotting": 30751, + "plough": 33811, + "plow": 38363, + "pls": 5572, + "plu": 2052, + "plug": 12628, + "plugged": 23261, + "plugin": 31278, + "plugins": 48797, + "plugs": 28083, + "plum": 26267, + "plum": 16202, + "plumb": 21769, + "plumber": 43478, + "plumbing": 24647, + "plume": 39495, + "plun": 15122, + "plunge": 26506, + "plur": 44664, + "plus": 3097, + "plush": 18926, + "pluto": 26380, + "ply": 17249, + "ply": 28705, + "plying": 36071, + "plym": 11907, + "plymouth": 13786, + "plz": 10538, + "pm": 13699, + "pm": 990, + "pmi": 41206, + "pmln": 23208, + "pmo": 18782, + "pmoindia": 20374, + "pms": 44223, + "pn": 14431, + "pn": 13774, + "pnc": 37148, + "pne": 30966, + "pneu": 28714, + "pneumonia": 42906, + "png": 20992, + "pnp": 25972, + "pnpp": 42175, + "pnw": 31521, + "po": 628, + "po": 3057, + "poa": 43912, + "poached": 27665, + "poaching": 35140, + "poc": 13232, + "poc": 27780, + "pocaly": 37987, + "pocalypse": 42307, + "poche": 38336, + "poche": 39022, + "pocket": 29147, + "pocket": 8504, + "pockets": 19566, + "pocon": 41850, + "pod": 3583, + "pod": 7446, + "podcast": 39654, + "podcast": 4294, + "podcasting": 40106, + "podcasts": 
19392, + "pode": 33368, + "poder": 24960, + "podernfamily": 26620, + "podi": 32853, + "podium": 14093, + "pods": 18776, + "poe": 4746, + "poe": 19254, + "poem": 9436, + "poems": 15577, + "poet": 41019, + "poet": 9872, + "poetic": 26365, + "poetry": 20192, + "poetry": 6038, + "poetryday": 39255, + "poets": 19804, + "pof": 40850, + "poff": 28236, + "pogba": 25998, + "poign": 29682, + "poignant": 32138, + "poin": 9074, + "point": 13280, + "point": 2301, + "pointe": 24631, + "pointed": 20703, + "pointer": 29883, + "pointers": 36760, + "pointing": 19233, + "pointless": 33586, + "points": 3396, + "pois": 17008, + "poise": 45087, + "poised": 27354, + "poison": 30722, + "poison": 17074, + "poisoned": 43624, + "poisoning": 25750, + "poisonous": 37131, + "pok": 15387, + "poke": 6892, + "poke": 23186, + "pokemon": 16239, + "pokemon": 9528, + "pokemongo": 23985, + "poker": 30735, + "poker": 11865, + "pokes": 40221, + "poking": 49169, + "poké": 20656, + "pokémon": 22066, + "pol": 977, + "pol": 7649, + "pola": 43876, + "poland": 9834, + "polar": 21432, + "polar": 12214, + "polari": 27919, + "polaris": 37965, + "polarized": 48437, + "polaro": 25237, + "polaroid": 30427, + "poldark": 41322, + "pole": 26682, + "pole": 8170, + "poles": 22585, + "poli": 9675, + "poli": 5414, + "polic": 16126, + "police": 15535, + "police": 2120, + "policeman": 37713, + "policemen": 47946, + "polici": 10819, + "policies": 10993, + "policing": 20969, + "policy": 30173, + "policy": 4660, + "polio": 30533, + "polis": 16133, + "polish": 46941, + "polish": 9632, + "polished": 21478, + "polishing": 43629, + "polit": 2247, + "politan": 15337, + "polite": 31497, + "politi": 40597, + "politic": 33333, + "political": 37744, + "political": 4197, + "politically": 24323, + "politician": 15960, + "politicians": 12914, + "politico": 39403, + "politics": 4929, + "polk": 33317, + "polka": 29476, + "poll": 7032, + "pollen": 27651, + "pollin": 19152, + "pollinators": 36599, + "polling": 18024, + "pollo": 42755, + "pollock": 37614, + "polls": 11813, + "pollu": 8370, + "polluted": 43346, + "pollution": 10384, + "polly": 31204, + "polo": 35928, + "polo": 10229, + "poly": 6833, + "poly": 18367, + "polye": 31730, + "polyester": 38514, + "polym": 23626, + "polymer": 29993, + "polyne": 38892, + "polyvore": 24771, + "pom": 7548, + "pom": 24280, + "pome": 27963, + "pomegran": 29326, + "pomegranate": 32415, + "pomer": 35156, + "pomona": 41690, + "pompe": 18352, + "pompeii": 47775, + "pompeo": 34351, + "pompey": 35079, + "pon": 3809, + "pon": 22391, + "ponce": 43637, + "pond": 10750, + "ponder": 36863, + "pondering": 47395, + "ponds": 31033, + "pone": 32183, + "pong": 40546, + "pong": 17710, + "ponies": 34157, + "pons": 41255, + "pont": 47563, + "pont": 22997, + "ponte": 40892, + "ponti": 15527, + "pontiac": 25373, + "pontifex": 33566, + "ponty": 45152, + "pony": 24438, + "pony": 12678, + "ponytail": 43265, + "poo": 6601, + "poo": 14389, + "pooch": 37037, + "poodle": 34961, + "pooh": 27103, + "pooja": 35676, + "pool": 12484, + "pool": 2831, + "poole": 26290, + "pools": 18736, + "poolside": 35509, + "poon": 33799, + "poon": 36178, + "poop": 23310, + "poor": 14528, + "poor": 3665, + "poorest": 40771, + "poorly": 21101, + "pop": 6530, + "pop": 2852, + "popart": 47425, + "popcorn": 15034, + "pope": 16994, + "pope": 9283, + "popefrancis": 37254, + "poplar": 38726, + "popo": 38835, + "popo": 35572, + "popp": 13156, + "popped": 14934, + "poppies": 30385, + "poppin": 28536, + "popping": 18152, + "poppins": 41216, + "poppy": 32194, + "poppy": 15447, + "pops": 
11705, + "popsic": 38481, + "popu": 3785, + "popul": 6593, + "popular": 15854, + "popular": 4368, + "popularity": 19235, + "populated": 38420, + "population": 8423, + "populations": 23797, + "populism": 48998, + "populist": 49376, + "popup": 33053, + "por": 817, + "por": 7697, + "pora": 23537, + "porcel": 19409, + "porcelain": 20451, + "porch": 17154, + "pore": 28267, + "pork": 40379, + "pork": 7897, + "poro": 48110, + "porridge": 34924, + "porsch": 48009, + "porsche": 44049, + "porsche": 8783, + "port": 1641, + "port": 1418, + "porta": 45037, + "portable": 11949, + "portage": 32087, + "portal": 14982, + "porte": 28654, + "ported": 16879, + "porter": 28319, + "porter": 10318, + "porters": 15670, + "portfoli": 45766, + "portfolio": 11938, + "porth": 37425, + "porti": 45760, + "porting": 26052, + "portion": 13739, + "portions": 22914, + "portland": 38366, + "portland": 8880, + "portman": 34755, + "porto": 24853, + "porto": 18947, + "portobello": 48025, + "portra": 4175, + "portrait": 39312, + "portrait": 5352, + "portraits": 14203, + "portray": 46282, + "portrayal": 39238, + "portrayed": 36093, + "ports": 7734, + "portsm": 17063, + "portsmouth": 19074, + "portu": 7159, + "portugal": 9503, + "portugue": 17498, + "portuguese": 18019, + "pos": 1780, + "pos": 11839, + "pose": 25478, + "pose": 4230, + "posed": 5206, + "posei": 47270, + "poser": 46899, + "poses": 9773, + "posey": 34852, + "posh": 26748, + "posing": 10518, + "posit": 28793, + "positi": 7895, + "position": 4657, + "positioned": 34482, + "positioning": 30657, + "positions": 12188, + "positive": 21811, + "positive": 4844, + "positively": 24688, + "positivity": 19966, + "poss": 39745, + "posse": 17414, + "posse": 28413, + "possess": 36810, + "possessed": 36220, + "possession": 16154, + "possessions": 40588, + "possi": 2521, + "possibilities": 17932, + "possibility": 18517, + "possible": 3134, + "possibly": 8601, + "possum": 38575, + "post": 3489, + "post": 1549, + "postage": 27570, + "postal": 21687, + "postcard": 14785, + "postcards": 23922, + "postdoc": 41013, + "posted": 4752, + "poster": 22881, + "poster": 3574, + "posters": 9673, + "postgame": 34873, + "postgraduate": 31997, + "posthum": 42410, + "posting": 7559, + "postman": 38285, + "postpon": 23247, + "postponed": 25097, + "posts": 7824, + "postseason": 24521, + "posture": 29681, + "posure": 35539, + "pot": 3547, + "pot": 5168, + "potam": 45825, + "potassi": 36889, + "potassium": 37147, + "potat": 5975, + "potato": 8527, + "potatoes": 11567, + "potd": 28765, + "pote": 41869, + "poten": 4454, + "potent": 26082, + "potenti": 44104, + "potential": 5100, + "potentially": 16508, + "potholes": 47506, + "potion": 46055, + "potom": 38848, + "potomac": 43372, + "pots": 19234, + "pott": 28698, + "potted": 48581, + "potter": 24975, + "potter": 9026, + "pottery": 18396, + "potts": 39839, + "potty": 43569, + "potus": 8740, + "pou": 9423, + "pouch": 26811, + "poul": 22485, + "poultry": 31005, + "poun": 33719, + "pound": 33809, + "pound": 10674, + "pounding": 46544, + "pounds": 10752, + "pour": 33112, + "pour": 8180, + "poured": 26621, + "pouring": 16098, + "pours": 26005, + "pout": 39621, + "poutine": 43768, + "pov": 25731, + "pover": 8432, + "pover": 29464, + "poverty": 9095, + "pow": 1317, + "pow": 17745, + "powder": 32427, + "powder": 9674, + "powe": 36955, + "powell": 13305, + "power": 2789, + "power": 1807, + "powerball": 47803, + "powered": 45442, + "powered": 7332, + "powerful": 4875, + "powerhouse": 22858, + "powering": 16231, + "powerof": 31961, + "powerpoint": 38940, + 
"powerrangers": 40620, + "powers": 9422, + "pox": 43649, + "poy": 34737, + "poyn": 47655, + "poz": 39953, + "pp": 604, + "pp": 4186, + "ppa": 10416, + "ppard": 23391, + "ppc": 27778, + "ppe": 24573, + "ppe": 11867, + "pped": 1873, + "ppel": 46523, + "ppen": 30663, + "pper": 6719, + "pper": 2440, + "ppers": 5232, + "ppery": 27833, + "ppet": 20744, + "ppets": 25849, + "ppg": 27433, + "ppi": 9594, + "ppie": 33795, + "ppin": 8076, + "pping": 22214, + "pping": 1682, + "ppings": 35687, + "ppl": 6758, + "pple": 12302, + "ppm": 42053, + "ppo": 10215, + "ppor": 37613, + "ppp": 14017, + "pps": 10683, + "ppv": 38864, + "ppy": 30360, + "ppy": 3860, + "pr": 766, + "pr": 4150, + "pra": 1865, + "pra": 19285, + "prab": 17901, + "prabhas": 29959, + "prabhu": 31529, + "prac": 2243, + "practi": 29995, + "practic": 5495, + "practical": 10792, + "practically": 25588, + "practice": 3349, + "practiced": 36749, + "practices": 9040, + "practicing": 12750, + "practise": 38938, + "practising": 36478, + "practiti": 19909, + "practitioner": 32591, + "practitioners": 29045, + "prada": 29456, + "pradesh": 15384, + "prado": 44141, + "prag": 31025, + "prague": 14940, + "prairi": 12629, + "prairie": 14753, + "praise": 10013, + "praised": 27649, + "praises": 23049, + "praising": 36961, + "prakash": 43708, + "prakash": 25366, + "pram": 47774, + "pran": 20048, + "prank": 23654, + "pras": 41562, + "prasad": 29562, + "prat": 23069, + "prati": 45773, + "pratt": 37863, + "pratt": 23396, + "prawn": 33102, + "prawns": 34903, + "pray": 12671, + "pray": 6041, + "prayed": 34665, + "prayer": 41452, + "prayer": 6583, + "prayers": 8393, + "prayfor": 18443, + "praying": 11550, + "prays": 46602, + "prc": 28781, + "pre": 679, + "pre": 2900, + "preach": 22545, + "preacher": 29357, + "preaching": 23642, + "precau": 36532, + "precautions": 47845, + "prece": 15361, + "preci": 5470, + "precin": 27908, + "precinct": 32587, + "precious": 8226, + "precipit": 27463, + "precipitation": 33399, + "precise": 24457, + "precisely": 34954, + "precision": 44021, + "precision": 15621, + "pred": 40370, + "predat": 13364, + "predator": 20653, + "predators": 25569, + "prede": 38454, + "predecess": 38963, + "predic": 4876, + "predict": 16900, + "predictable": 25344, + "predicted": 18702, + "predicting": 30414, + "prediction": 16296, + "predictions": 15125, + "predictive": 29798, + "predicts": 25960, + "preds": 40125, + "pree": 47026, + "preet": 30131, + "prefe": 14542, + "prefecture": 32890, + "prefer": 33426, + "prefer": 11450, + "preference": 35057, + "preferences": 38118, + "preferred": 18772, + "prefers": 38528, + "pregame": 18575, + "pregn": 7190, + "pregnancy": 12769, + "pregnant": 11195, + "prehistoric": 32750, + "prejudice": 28337, + "preli": 15523, + "prelimin": 19990, + "preliminary": 20997, + "prelims": 43223, + "prelude": 42966, + "prem": 32090, + "prem": 21724, + "premature": 39253, + "premi": 2413, + "premier": 16996, + "premier": 5539, + "premiere": 5367, + "premiered": 27652, + "premieres": 19907, + "premiering": 32615, + "premierleague": 22608, + "premiers": 44883, + "premiership": 23665, + "premiosm": 38460, + "premiosmtvmiaw": 38630, + "premise": 45952, + "premises": 27266, + "premium": 8011, + "pren": 20801, + "preneur": 46288, + "preorder": 16703, + "preorders": 45985, + "prep": 6430, + "prep": 7277, + "prepa": 26270, + "prepaid": 42934, + "prepar": 4968, + "preparation": 11651, + "preparations": 19135, + "prepare": 7014, + "prepared": 7677, + "preparedness": 29492, + "prepares": 16375, + "preparing": 7365, + "prepped": 34379, + "prepping": 
16459, + "preps": 14765, + "prequel": 40461, + "pres": 1385, + "pres": 8529, + "presale": 27135, + "presby": 30447, + "presbyter": 33959, + "presbyterian": 35370, + "preschool": 24354, + "prescott": 29392, + "prescri": 14851, + "prescribed": 36968, + "prescription": 23061, + "preseason": 13813, + "presen": 16742, + "presence": 8848, + "present": 2344, + "present": 2881, + "presentation": 4594, + "presentations": 16998, + "presented": 4587, + "presenter": 18587, + "presenters": 32759, + "presenting": 5339, + "presents": 4215, + "preserv": 17616, + "preservation": 21074, + "preserve": 15570, + "preserved": 23161, + "preserves": 44881, + "preserving": 32315, + "presi": 1697, + "presiden": 43374, + "presidency": 18077, + "president": 19900, + "president": 1940, + "presidente": 47363, + "presidenti": 48297, + "presidential": 8503, + "presidents": 16726, + "presiding": 45298, + "presley": 30013, + "press": 4124, + "press": 2124, + "pressed": 20080, + "presser": 27826, + "presses": 33748, + "pressing": 20893, + "pressure": 6083, + "pressures": 38487, + "prest": 41840, + "presti": 12245, + "prestige": 29328, + "prestigious": 15888, + "presto": 42211, + "preston": 37335, + "preston": 15179, + "presu": 21667, + "presumably": 42562, + "pret": 9652, + "preten": 15871, + "pretend": 18111, + "pretending": 21306, + "pretoria": 36080, + "prett": 46667, + "prettier": 31745, + "prettiest": 22866, + "pretty": 18286, + "pretty": 2111, + "pretz": 24890, + "pretzel": 36707, + "pretzels": 45468, + "prev": 20274, + "prevail": 31637, + "prevalence": 41729, + "prevalent": 46260, + "preven": 29382, + "prevent": 26436, + "prevent": 7968, + "preventable": 44250, + "prevented": 35356, + "preventing": 21756, + "prevention": 9500, + "preventive": 40949, + "prevents": 31746, + "preview": 4449, + "previews": 20279, + "previous": 9252, + "previously": 13359, + "prey": 17131, + "prez": 17956, + "pri": 955, + "pri": 23400, + "pric": 24275, + "price": 13254, + "price": 2827, + "priced": 16934, + "priceless": 15743, + "prices": 5954, + "pricing": 14800, + "prick": 43921, + "prick": 46516, + "pride": 15323, + "pride": 3436, + "pridemonth": 41410, + "prie": 22477, + "priest": 38756, + "priest": 14222, + "priests": 30005, + "prim": 22004, + "prima": 35611, + "prima": 33277, + "primal": 36604, + "primar": 21579, + "primaries": 46126, + "primarily": 29465, + "primark": 48329, + "primary": 35024, + "primary": 5814, + "primavera": 44899, + "prime": 14162, + "prime": 5183, + "primed": 45694, + "primer": 22388, + "primetime": 29763, + "primitive": 37467, + "primo": 43215, + "primrose": 45891, + "prin": 1588, + "prince": 9457, + "prince": 4735, + "princes": 45329, + "princes": 30136, + "princess": 24123, + "princess": 5079, + "princesses": 34161, + "princeton": 22433, + "princi": 5129, + "principal": 33599, + "principal": 8860, + "principals": 27524, + "principle": 19595, + "principles": 13755, + "print": 17851, + "print": 3557, + "printable": 29648, + "printed": 7978, + "printer": 14521, + "printers": 27881, + "printing": 7369, + "printmaking": 38669, + "prints": 7704, + "prior": 20328, + "prior": 10572, + "priorit": 47773, + "prioriti": 28822, + "priorities": 15232, + "prioritize": 46715, + "priority": 12451, + "priory": 38665, + "prisc": 32468, + "priscilla": 42396, + "prise": 23343, + "prism": 49311, + "prism": 34356, + "prison": 9281, + "prison": 6622, + "prisoner": 21427, + "prisoners": 17460, + "prisons": 26607, + "pristine": 30618, + "prit": 41668, + "prit": 37523, + "prith": 39173, + "prius": 43561, + "priv": 3270, + "privacy": 
10437, + "private": 20362, + "private": 4439, + "privately": 32970, + "privati": 27379, + "privi": 8367, + "privileg": 18015, + "privilege": 11537, + "privileged": 18166, + "prix": 10875, + "priya": 31275, + "priyan": 16488, + "priyanka": 31959, + "priyankach": 30030, + "priyankachopra": 30264, + "prize": 48222, + "prize": 4521, + "prized": 38769, + "prizes": 9268, + "prk": 37094, + "pro": 644, + "pro": 2630, + "proactive": 33364, + "prob": 17706, + "prob": 24007, + "probab": 3907, + "probability": 32637, + "probable": 42444, + "probably": 4047, + "probation": 36531, + "probe": 14359, + "probes": 48564, + "probiotics": 49395, + "proble": 2719, + "problem": 4324, + "problematic": 33767, + "problems": 4671, + "probs": 16330, + "probz": 34243, + "proc": 38417, + "proce": 4076, + "procedu": 18204, + "procedural": 48177, + "procedure": 20163, + "procedures": 21109, + "proceed": 26664, + "proceed": 33894, + "proceedings": 26953, + "proceeds": 11882, + "process": 17291, + "process": 4078, + "processed": 23816, + "processes": 15169, + "processing": 11737, + "procession": 26288, + "processor": 22838, + "processors": 43634, + "proclaimed": 34489, + "proclamation": 32065, + "procra": 25361, + "procrastin": 25586, + "procrastination": 42825, + "procreate": 39336, + "proctor": 47204, + "procu": 21001, + "procurement": 23733, + "prod": 44349, + "prod": 11991, + "prodi": 27759, + "prodigy": 31973, + "produ": 27852, + "produc": 1471, + "produce": 7529, + "produced": 7479, + "producer": 7064, + "producers": 13883, + "produces": 19940, + "producing": 13579, + "product": 32602, + "product": 4306, + "production": 4146, + "productions": 14166, + "productive": 9697, + "productivity": 12800, + "products": 3964, + "prof": 15043, + "prof": 5488, + "profe": 2611, + "profess": 5486, + "professi": 3705, + "profession": 8104, + "profession": 19671, + "professional": 46007, + "professional": 4774, + "professionalism": 41252, + "professionally": 33892, + "professionals": 10165, + "professor": 47302, + "professor": 6092, + "professors": 27758, + "profici": 34685, + "profile": 14291, + "profile": 6444, + "profiles": 22070, + "profiling": 37123, + "profit": 16941, + "profit": 7909, + "profitable": 25465, + "profits": 13410, + "profound": 48245, + "profound": 22998, + "profs": 19260, + "prog": 22219, + "progno": 46070, + "program": 4162, + "program": 2737, + "programme": 6322, + "programmer": 37001, + "programmes": 20468, + "programming": 10831, + "programs": 7345, + "progre": 7069, + "progress": 4421, + "progressi": 23297, + "progressing": 32346, + "progression": 24772, + "progressive": 12208, + "progressives": 41709, + "prohi": 41124, + "prohib": 45040, + "prohibition": 34440, + "proj": 39156, + "proje": 48345, + "projec": 1610, + "project": 15911, + "project": 1965, + "projected": 22873, + "projection": 22384, + "projections": 34638, + "projector": 27816, + "projects": 5090, + "proli": 19710, + "prolife": 32126, + "prolifer": 39018, + "prolific": 27839, + "prolly": 45968, + "prolon": 35379, + "prolonged": 41972, + "prom": 40363, + "prom": 7944, + "prome": 34355, + "promen": 33578, + "promenade": 35522, + "promethe": 44183, + "promin": 35217, + "prominent": 19172, + "promis": 3963, + "promise": 6745, + "promised": 11516, + "promises": 12064, + "promising": 14183, + "promo": 3037, + "promo": 6755, + "promos": 35044, + "promote": 47384, + "promote": 8003, + "promoted": 16395, + "promoter": 33081, + "promotes": 20169, + "promoting": 9695, + "promotion": 9259, + "promotional": 17619, + "promotions": 19142, + "promp": 11671, + 
"prompt": 20198, + "prompted": 45746, + "prompts": 33490, + "proms": 37759, + "pron": 13285, + "prone": 30964, + "pronoun": 23022, + "pronounce": 40489, + "pronounced": 34109, + "pronto": 44296, + "proof": 17020, + "proof": 5248, + "proofing": 35679, + "proofs": 41023, + "prop": 19123, + "prop": 16254, + "propag": 12151, + "propaganda": 14718, + "propane": 45546, + "propel": 48439, + "propeller": 47404, + "proper": 3577, + "proper": 8205, + "properly": 12560, + "properties": 10922, + "property": 26486, + "property": 5043, + "prophe": 9662, + "prophecy": 32501, + "prophet": 15549, + "prophetic": 47476, + "prophets": 39441, + "propor": 35016, + "proportion": 35775, + "proportions": 39391, + "propos": 9455, + "proposal": 12139, + "proposals": 20568, + "propose": 28471, + "proposed": 10615, + "proposes": 27133, + "proposing": 42631, + "proposition": 44780, + "propri": 28243, + "props": 15249, + "propulsion": 49380, + "pros": 33925, + "pros": 14147, + "prosciutto": 46565, + "prose": 47063, + "prose": 28675, + "prosecco": 28839, + "prosecu": 12136, + "prosecution": 30902, + "prosecutor": 23736, + "prosecutors": 31656, + "prosp": 24242, + "prospec": 12693, + "prospect": 11211, + "prospective": 28034, + "prospects": 15372, + "prosper": 16121, + "prosper": 33526, + "prosperity": 17203, + "prosperous": 28252, + "prost": 47923, + "prostate": 28808, + "prostatec": 49064, + "prosthetic": 44602, + "prostitu": 37333, + "protag": 28950, + "protagonist": 38183, + "prote": 1845, + "protec": 5640, + "protect": 25563, + "protect": 4817, + "protected": 12266, + "protecting": 11710, + "protection": 6238, + "protections": 33772, + "protective": 17028, + "protector": 20441, + "protectors": 45039, + "protects": 21889, + "protein": 8088, + "proteins": 28661, + "protest": 6279, + "protestant": 46945, + "protested": 48089, + "protester": 42073, + "protesters": 12660, + "protesting": 18788, + "protestors": 27822, + "protests": 12450, + "proto": 8672, + "proto": 44958, + "protocol": 19938, + "protocols": 39631, + "proton": 40009, + "prototype": 16675, + "prototyping": 42081, + "prou": 5739, + "proud": 11080, + "proud": 1679, + "prouder": 39585, + "proudest": 46806, + "proudly": 11203, + "proudof": 48184, + "proudtobe": 35043, + "prov": 23772, + "prov": 35021, + "prove": 10107, + "proved": 16473, + "proven": 35405, + "proven": 14569, + "provence": 28067, + "prover": 18312, + "proverb": 34419, + "proverbs": 27016, + "proves": 16119, + "provi": 2289, + "provide": 4832, + "provided": 9046, + "providence": 19331, + "provider": 14409, + "providers": 17120, + "provides": 7161, + "providing": 7250, + "provin": 12074, + "province": 8978, + "provinces": 35050, + "provincial": 16002, + "proving": 18055, + "provision": 30148, + "provisional": 36008, + "provisions": 39269, + "provo": 15367, + "provoc": 31618, + "provocative": 43809, + "provoking": 25510, + "provost": 36627, + "prow": 38737, + "prowrestling": 39825, + "prox": 41616, + "proxim": 31436, + "proximity": 38298, + "proxy": 31680, + "prs": 23879, + "pru": 12961, + "pruitt": 39453, + "prun": 29029, + "pruning": 48133, + "pry": 31965, + "pryor": 43375, + "ps": 3982, + "ps": 814, + "psa": 14031, + "psal": 13859, + "psalm": 17995, + "psalms": 35003, + "psb": 37017, + "psc": 43118, + "psd": 28810, + "pse": 19737, + "pse": 5423, + "pseu": 24919, + "pseudo": 46618, + "psg": 17123, + "psi": 45848, + "psi": 24533, + "psic": 29299, + "psis": 33041, + "psl": 21373, + "psn": 36781, + "pso": 27045, + "pson": 7487, + "psori": 44688, + "psp": 32769, + "pss": 35718, + "pss": 42535, + 
"psst": 47814, + "pst": 12692, + "psu": 41286, + "psu": 28338, + "psv": 44530, + "psy": 3576, + "psy": 11056, + "psych": 31041, + "psych": 20509, + "psyched": 19932, + "psyched": 35199, + "psychedelic": 23292, + "psychi": 18147, + "psychiatric": 30578, + "psychiatry": 39706, + "psychic": 24916, + "psycho": 6472, + "psycho": 22154, + "psychological": 18153, + "psychologist": 32827, + "psychology": 12352, + "psychop": 30112, + "psychotic": 48774, + "pt": 11139, + "pt": 1459, + "pta": 11586, + "ptbo": 40481, + "ptc": 44646, + "pte": 47804, + "pter": 49323, + "pti": 29375, + "pti": 10491, + "ptic": 20670, + "ption": 3479, + "ptions": 24963, + "pto": 31372, + "pto": 34092, + "pton": 19780, + "pts": 5886, + "ptsd": 23973, + "ptv": 42402, + "pu": 755, + "pu": 11780, + "pub": 20720, + "pub": 6301, + "puberty": 44122, + "pubg": 31496, + "publ": 3434, + "publi": 1617, + "public": 3592, + "public": 2122, + "publica": 49007, + "publication": 13538, + "publications": 27334, + "publichealth": 35872, + "publicity": 20831, + "publicly": 18554, + "publish": 19032, + "published": 4311, + "publisher": 20455, + "publishers": 25222, + "publishes": 35633, + "publishing": 10994, + "publix": 47985, + "pubs": 21099, + "puc": 48779, + "puck": 17550, + "pud": 39234, + "pudding": 14025, + "puddle": 33545, + "pue": 20161, + "pueblo": 33076, + "puer": 8968, + "puerto": 12289, + "puertor": 22757, + "puertorico": 26356, + "puff": 44477, + "puff": 17184, + "puffin": 47632, + "puffs": 47453, + "puffy": 49245, + "pug": 20950, + "pug": 17739, + "pugchat": 42266, + "pugh": 41302, + "puglia": 38345, + "pugs": 39425, + "puj": 46163, + "puja": 33753, + "puk": 31811, + "pul": 2469, + "pul": 40512, + "pula": 45856, + "puli": 47293, + "pulit": 27745, + "pulitzer": 31419, + "pull": 20155, + "pull": 6857, + "pulled": 8525, + "pulling": 12897, + "pullman": 40203, + "pullover": 44020, + "pulls": 16041, + "pulmon": 32613, + "pulmonary": 39132, + "pulp": 25410, + "pulse": 40091, + "pulse": 12485, + "pulses": 42177, + "pulsion": 35398, + "pum": 37497, + "puma": 20858, + "pump": 5179, + "pump": 9173, + "pumped": 12796, + "pumping": 25150, + "pumpkin": 36386, + "pumpkin": 8842, + "pumpkins": 23787, + "pumps": 18540, + "pun": 2707, + "pun": 19929, + "punc": 43907, + "punch": 29332, + "punch": 10730, + "punched": 31689, + "punches": 35279, + "punching": 33468, + "punctu": 31565, + "punctuation": 47051, + "pundit": 41466, + "pune": 32593, + "pune": 14488, + "pung": 45420, + "puni": 11479, + "punish": 34569, + "punished": 31598, + "punisher": 38509, + "punishment": 19099, + "punjab": 19405, + "punjab": 12883, + "punjabi": 25430, + "punk": 28933, + "punk": 7246, + "punks": 47171, + "puns": 35231, + "punt": 32699, + "punta": 34112, + "punter": 47092, + "pup": 11926, + "pup": 11302, + "pupil": 27265, + "pupils": 13628, + "pupp": 7116, + "puppet": 18439, + "puppets": 28475, + "puppies": 14820, + "puppy": 25431, + "puppy": 6829, + "puppylove": 40849, + "pups": 20778, + "pur": 1727, + "pur": 6265, + "pura": 25596, + "puram": 46174, + "purcell": 46065, + "purch": 8384, + "purchase": 5481, + "purchased": 13399, + "purchases": 21887, + "purchasing": 20718, + "purdu": 40691, + "purdue": 22280, + "pure": 14202, + "pure": 5979, + "puree": 45474, + "purely": 32459, + "puremichigan": 39783, + "purest": 45497, + "purge": 33514, + "puri": 16910, + "puri": 21974, + "purification": 47724, + "purity": 29780, + "purple": 17837, + "purple": 5496, + "purpose": 33492, + "purpose": 7391, + "purposes": 22020, + "purr": 49262, + "purr": 46343, + "purse": 16480, + 
"pursue": 19463, + "pursuing": 26424, + "pursuit": 16469, + "purée": 40981, + "pus": 13841, + "pusa": 40825, + "push": 16028, + "push": 6831, + "pushaw": 35407, + "pushaward": 35448, + "pushawards": 47184, + "pushed": 16155, + "pushes": 23828, + "pushing": 11549, + "put": 29535, + "put": 1983, + "putin": 10693, + "putnam": 40235, + "puts": 7898, + "putt": 30279, + "putter": 44723, + "putting": 5154, + "puzz": 19760, + "puzzle": 12875, + "puzzles": 27986, + "pv": 14517, + "pv": 13495, + "pvc": 26959, + "pvp": 44172, + "pvt": 29898, + "pw": 19419, + "pw": 16067, + "pwc": 22965, + "px": 24790, + "px": 10262, + "pxrtg": 36262, + "py": 4005, + "py": 7504, + "pye": 31099, + "pyeongchang": 36066, + "pyg": 41450, + "pyram": 14405, + "pyramid": 18725, + "pyramids": 36877, + "pyrene": 36740, + "pyrenees": 39744, + "pyro": 39762, + "python": 13370, + "pz": 48361, + "pé": 43167, + "q": 80, + "q": 336, + "qa": 24944, + "qa": 16360, + "qad": 27844, + "qadri": 35672, + "qaeda": 31246, + "qanda": 48672, + "qanon": 19182, + "qant": 35404, + "qantas": 43250, + "qatar": 32804, + "qatar": 10872, + "qb": 8073, + "qbs": 38188, + "qc": 17406, + "qe": 30974, + "qf": 27215, + "qi": 25054, + "qi": 11256, + "qing": 46522, + "qing": 34339, + "ql": 28366, + "qld": 23039, + "qld": 13765, + "qldpol": 42296, + "qm": 42148, + "qotd": 24504, + "qpr": 24788, + "qq": 31960, + "qr": 18193, + "qs": 14364, + "qt": 15013, + "qtr": 44803, + "qu": 666, + "qu": 28646, + "qua": 20363, + "quack": 45575, + "quad": 11656, + "quad": 13419, + "quadcopter": 39792, + "quadru": 35831, + "quaid": 34265, + "quail": 34392, + "quaint": 45976, + "quake": 8421, + "quaker": 43395, + "quakes": 24572, + "qual": 9979, + "qual": 32405, + "qualcomm": 38683, + "quali": 4574, + "qualification": 21508, + "qualifications": 35225, + "qualified": 11927, + "qualifier": 18733, + "qualifiers": 21388, + "qualifies": 35820, + "qualify": 17019, + "qualifying": 11895, + "qualitative": 45847, + "qualities": 20488, + "quality": 28545, + "quality": 3027, + "quan": 11669, + "quan": 27490, + "quand": 28198, + "quant": 15050, + "quanti": 31540, + "quantitative": 40583, + "quantities": 33917, + "quantity": 26920, + "quantum": 15320, + "quar": 3856, + "quare": 42549, + "quarry": 27601, + "quart": 7851, + "quarter": 8816, + "quarter": 6632, + "quarterback": 16545, + "quarterfinal": 37992, + "quarterfinals": 28971, + "quarterly": 23350, + "quarters": 10146, + "quartet": 18056, + "quartz": 17752, + "quat": 25715, + "quattro": 40300, + "quay": 40276, + "quay": 17304, + "que": 1147, + "que": 2319, + "quebec": 15373, + "queen": 6407, + "queen": 2997, + "queenof": 44398, + "queens": 22943, + "queens": 9330, + "queensland": 15168, + "queer": 38874, + "queer": 18161, + "quel": 39774, + "quel": 21879, + "quen": 23876, + "quen": 38324, + "quent": 23808, + "quentin": 27530, + "quer": 17378, + "quer": 26859, + "quered": 23210, + "queries": 32958, + "querque": 30338, + "query": 27464, + "ques": 25328, + "ques": 7715, + "queso": 40110, + "quest": 31653, + "quest": 4846, + "questi": 2391, + "question": 18961, + "question": 4382, + "questionable": 30733, + "questioned": 31847, + "questioning": 24887, + "questions": 3883, + "quests": 44611, + "quet": 8513, + "quets": 39055, + "quetta": 38326, + "quette": 18993, + "queu": 32705, + "queue": 18549, + "queues": 40649, + "queuing": 44082, + "quez": 18677, + "quezon": 41117, + "qui": 1912, + "qui": 18046, + "quic": 26474, + "quiche": 47723, + "quick": 5969, + "quick": 3712, + "quicker": 29211, + "quickest": 37734, + "quickly": 7787, + "quid": 
30732, + "quie": 43875, + "quien": 43482, + "quiere": 42723, + "quiero": 32567, + "quiet": 17853, + "quiet": 7557, + "quietly": 22208, + "quig": 44690, + "quil": 12305, + "quill": 48951, + "quilt": 23977, + "quilted": 46052, + "quin": 8607, + "quin": 17167, + "quincy": 27640, + "quind": 32339, + "quinn": 12306, + "quinoa": 26703, + "quins": 39701, + "quint": 26898, + "quinta": 47446, + "quinte": 22098, + "quintess": 37538, + "quintet": 35125, + "quipment": 42813, + "quir": 15943, + "quirky": 25044, + "quis": 15064, + "quist": 25128, + "quit": 19358, + "quit": 11140, + "quite": 4135, + "quito": 35828, + "quits": 32505, + "quitting": 33871, + "quity": 33133, + "quiz": 31197, + "quiz": 8344, + "quizz": 35041, + "quo": 3046, + "quo": 28127, + "quoi": 45549, + "quot": 5452, + "quot": 47587, + "quota": 42097, + "quotation": 49195, + "quote": 15446, + "quote": 4020, + "quoted": 27706, + "quoteoftheday": 19975, + "quotes": 5808, + "quoting": 31651, + "qur": 37782, + "quran": 19690, + "qureshi": 46307, + "qvist": 42322, + "qx": 45038, + "r": 81, + "r": 337, + "ra": 559, + "ra": 1735, + "raa": 44344, + "rab": 14816, + "rab": 33224, + "rabb": 6875, + "rabbi": 20959, + "rabbit": 10274, + "rabbits": 27028, + "rabhu": 25806, + "rable": 10182, + "rac": 1773, + "rac": 30462, + "raccoon": 29516, + "race": 10978, + "race": 2471, + "racec": 18814, + "racecourse": 25036, + "raced": 36021, + "racer": 16798, + "racers": 33603, + "races": 8605, + "raceway": 24650, + "rach": 6876, + "rach": 33429, + "racha": 21952, + "racha": 35022, + "rachael": 29095, + "rachel": 13511, + "rachel": 8029, + "raci": 33381, + "racial": 13801, + "racially": 43577, + "racing": 23306, + "racing": 3699, + "racism": 11276, + "racist": 9684, + "racists": 41777, + "rack": 24600, + "rack": 12034, + "racket": 37691, + "racks": 21191, + "rad": 4473, + "rad": 8238, + "rada": 30437, + "radar": 9672, + "radcliffe": 33096, + "rade": 44494, + "rade": 17911, + "rader": 45002, + "radford": 45800, + "radha": 43122, + "radi": 5772, + "radial": 42028, + "radiance": 45670, + "radiant": 25614, + "radiation": 18210, + "radiator": 39372, + "radic": 18082, + "radical": 13712, + "radicals": 45903, + "radio": 7176, + "radio": 2638, + "radioactive": 34704, + "radiodisney": 36483, + "radiohead": 39472, + "radiology": 29684, + "radios": 43669, + "radish": 37789, + "radius": 37570, + "rado": 29784, + "rae": 21646, + "rae": 15051, + "rael": 45390, + "raer": 44561, + "raf": 11495, + "raf": 11490, + "rafa": 14352, + "rafa": 24850, + "rafael": 38221, + "rafael": 19216, + "rafaelnadal": 49219, + "raff": 34900, + "raffic": 32928, + "raffle": 13752, + "raffles": 43489, + "rafi": 35304, + "raft": 9233, + "rafting": 36309, + "rag": 13958, + "rag": 20687, + "rage": 8593, + "rages": 34253, + "ragh": 35642, + "ragha": 40972, + "raging": 25015, + "ragn": 24125, + "ragnar": 34385, + "ragnarok": 41856, + "ragon": 34768, + "rags": 47838, + "rah": 12277, + "rah": 8766, + "raheem": 43317, + "rahim": 24152, + "rahman": 19680, + "rahu": 13129, + "rahul": 37239, + "rahul": 17440, + "rahulg": 27510, + "rahulgandhi": 28293, + "rai": 9165, + "rai": 9638, + "raid": 6877, + "raided": 43417, + "raider": 27368, + "raider": 21455, + "raidernation": 47901, + "raiders": 11817, + "raids": 26655, + "rail": 4573, + "rail": 6879, + "raila": 47273, + "railminindia": 35557, + "railroad": 17080, + "rails": 23427, + "railway": 27614, + "railway": 7856, + "railwayana": 46750, + "railways": 20765, + "raim": 45785, + "rain": 3128, + "rain": 2443, + "raina": 30564, + "rainbow": 24562, + "rainbow": 6286, 
+ "rainbows": 30483, + "raine": 49038, + "raine": 6871, + "rained": 32310, + "rainf": 15024, + "rainfall": 15350, + "rainforest": 22823, + "rainier": 37850, + "raining": 13964, + "rains": 14272, + "rainy": 10222, + "rais": 14729, + "raise": 24249, + "raise": 5078, + "raised": 6027, + "raiser": 33555, + "raises": 13297, + "raisethe": 47109, + "raisin": 36864, + "raising": 6883, + "raj": 5958, + "raj": 10813, + "raja": 46069, + "raja": 19150, + "rajan": 46595, + "rajas": 16185, + "rajasthan": 18017, + "raje": 21899, + "rajesh": 43602, + "raji": 27569, + "rajini": 29600, + "rajini": 40622, + "rajinikanth": 32922, + "rajiv": 40197, + "rajkumar": 49304, + "rajput": 47572, + "raju": 47029, + "rak": 13523, + "rak": 26287, + "rake": 26825, + "rake": 32712, + "rakesh": 41083, + "ral": 8062, + "ral": 1406, + "rale": 14192, + "raleigh": 18207, + "rall": 23249, + "rallies": 25230, + "rally": 18882, + "rally": 5041, + "rallying": 36836, + "ralph": 25290, + "ralph": 12234, + "ram": 1976, + "ram": 2007, + "rama": 22112, + "ramad": 12736, + "ramadan": 15547, + "ramadhan": 47415, + "raman": 39816, + "ramapho": 43963, + "ramaphosa": 44993, + "ramatta": 49112, + "rambo": 41855, + "ramcharan": 45275, + "rame": 47745, + "ramen": 18892, + "ramesh": 48640, + "ramesh": 40186, + "rami": 43016, + "ramirez": 23877, + "ramon": 27958, + "ramone": 47201, + "ramos": 21046, + "ramp": 14271, + "rampage": 32077, + "rampant": 41985, + "ramps": 35257, + "rams": 10292, + "ramsay": 26259, + "ramsey": 19215, + "ran": 1433, + "ran": 4031, + "rana": 22143, + "ranbir": 40881, + "rance": 29034, + "ranch": 43955, + "ranch": 10659, + "rancho": 26258, + "rand": 5628, + "rand": 18718, + "randall": 23639, + "rande": 21469, + "randolph": 29899, + "random": 11396, + "random": 6160, + "randomly": 17272, + "rands": 39153, + "randy": 29479, + "randy": 13279, + "rane": 28852, + "rang": 4043, + "rang": 24377, + "range": 13627, + "range": 3818, + "ranger": 31472, + "ranger": 13593, + "rangers": 7664, + "ranges": 25685, + "ranging": 25946, + "rani": 29264, + "rani": 22631, + "rank": 11501, + "ranked": 8307, + "rankin": 37539, + "ranking": 12347, + "rankings": 12596, + "ranks": 14469, + "rano": 18608, + "rans": 46259, + "ransom": 28523, + "ransom": 34646, + "ransomware": 33815, + "rant": 46467, + "rant": 9819, + "rants": 34014, + "ranveer": 32402, + "ranveer": 41482, + "ranveerofficial": 42116, + "rao": 16913, + "rap": 7773, + "rap": 7348, + "rape": 46099, + "rape": 10070, + "raped": 23700, + "rapha": 22754, + "raphael": 30091, + "rapi": 8610, + "rapid": 47697, + "rapid": 12205, + "rapidly": 16710, + "rapids": 18848, + "raping": 44926, + "rapist": 33360, + "rapp": 19283, + "rapper": 11860, + "rappers": 30315, + "rapping": 42864, + "raps": 37887, + "raptor": 26762, + "raptors": 17035, + "raq": 39787, + "raq": 43312, + "raqqa": 47074, + "raquel": 44338, + "rar": 26819, + "rar": 24605, + "rard": 21012, + "rare": 18992, + "rare": 3865, + "rarely": 17315, + "rarest": 43237, + "rarity": 45862, + "ras": 23492, + "ras": 8224, + "rasc": 30085, + "rascal": 43481, + "rash": 14917, + "rash": 30608, + "rashad": 46527, + "rasheed": 41638, + "rashi": 19426, + "rashid": 26757, + "rasp": 10487, + "raspberries": 37742, + "raspberry": 40162, + "raspberry": 13615, + "raspberrypi": 43934, + "rass": 45654, + "rasta": 47002, + "rat": 3806, + "rat": 8985, + "rata": 28568, + "ratchet": 25078, + "rate": 5068, + "rated": 8183, + "rates": 6864, + "rath": 18268, + "rath": 39772, + "rather": 5252, + "rati": 11486, + "rating": 10567, + "ratings": 14176, + "ratio": 15893, + 
"ration": 27002, + "ration": 35662, + "rational": 33086, + "ratna": 49078, + "ratri": 32288, + "rats": 19043, + "ratt": 20737, + "ratt": 34785, + "rattle": 40824, + "rattle": 41839, + "rau": 27744, + "raul": 30218, + "raun": 41169, + "rav": 14367, + "rav": 23606, + "rave": 38784, + "rave": 17601, + "ravel": 27927, + "raven": 10269, + "raven": 16803, + "ravens": 17946, + "ravi": 22947, + "ravi": 19538, + "ravin": 39099, + "raving": 45807, + "raviol": 41104, + "ravioli": 43460, + "raw": 10166, + "raw": 6323, + "rawlings": 40662, + "rax": 38520, + "ray": 5312, + "ray": 3077, + "raya": 29991, + "raymond": 16683, + "rayn": 47852, + "rayon": 47900, + "rays": 11064, + "raz": 9700, + "raz": 19087, + "raza": 37724, + "razer": 33832, + "razor": 24934, + "razor": 21300, + "razz": 43769, + "rb": 12740, + "rb": 7477, + "rbc": 37500, + "rbi": 15687, + "rbs": 29102, + "rc": 7575, + "rc": 7457, + "rca": 33942, + "rcb": 45240, + "rcmp": 31489, + "rcn": 49370, + "rctid": 49223, + "rd": 13501, + "rd": 1973, + "rda": 45755, + "rdr": 44364, + "rds": 32378, + "re": 515, + "re": 810, + "rea": 11521, + "reach": 4483, + "reach": 4279, + "reached": 6878, + "reaches": 14462, + "reaching": 11358, + "react": 36566, + "react": 15065, + "reacted": 42515, + "reacting": 40595, + "reaction": 7189, + "reactions": 18438, + "reactive": 42072, + "reactjs": 46173, + "reactor": 32037, + "reacts": 23115, + "read": 933, + "read": 1199, + "reader": 9884, + "readers": 10335, + "readiness": 28131, + "reading": 17556, + "reading": 2337, + "readingfc": 47428, + "readings": 23361, + "reads": 6597, + "ready": 17351, + "ready": 1112, + "reagan": 17767, + "real": 2017, + "real": 1532, + "realdonaldtrump": 7025, + "reale": 5930, + "realest": 45855, + "realestate": 32937, + "realestate": 6569, + "reali": 4185, + "realis": 38114, + "realise": 14773, + "realised": 17945, + "realising": 39537, + "realism": 20024, + "realist": 30248, + "realistic": 16157, + "realities": 32443, + "reality": 46802, + "reality": 5004, + "realization": 40402, + "realize": 7538, + "realized": 10489, + "realizes": 42918, + "realizing": 23284, + "reall": 39686, + "really": 43249, + "really": 1414, + "realm": 23083, + "realmadrid": 27866, + "realms": 43033, + "realness": 46761, + "realtime": 44002, + "realtime": 38203, + "realtor": 18038, + "realtors": 31759, + "realty": 20471, + "ream": 37242, + "ream": 15219, + "rean": 48477, + "reap": 31334, + "reaper": 29922, + "rear": 39652, + "rear": 10223, + "reas": 9121, + "reason": 12882, + "reason": 3893, + "reasonable": 18558, + "reasonably": 38589, + "reasoning": 30341, + "reasons": 5686, + "reau": 32398, + "reb": 12370, + "reb": 18796, + "reba": 48543, + "rebate": 43817, + "rebe": 25227, + "rebec": 10774, + "rebecca": 12892, + "rebel": 8185, + "rebel": 12248, + "rebellion": 22170, + "rebels": 13623, + "rebirth": 33303, + "reboot": 22385, + "reborn": 30229, + "reboun": 43381, + "rebound": 31280, + "rebounds": 19190, + "rebs": 28164, + "rebu": 43162, + "rebuild": 20022, + "rebuilding": 30880, + "rebuilt": 33137, + "rec": 1020, + "rec": 11243, + "recall": 15151, + "recalled": 32142, + "recalling": 47855, + "recalls": 24740, + "recap": 29816, + "recap": 8337, + "recaps": 47997, + "recard": 35536, + "rece": 1890, + "recei": 2148, + "receip": 38503, + "receipt": 30479, + "receipts": 41181, + "receive": 4800, + "received": 4178, + "receiver": 17659, + "receivers": 45294, + "receives": 10027, + "receiving": 7252, + "recent": 3969, + "recently": 4482, + "recep": 17450, + "reception": 8364, + "receptions": 46881, + "receptor": 41835, 
+ "recess": 38182, + "recession": 27176, + "recharge": 29396, + "rechargeable": 37516, + "reci": 2037, + "recipe": 28923, + "recipe": 4614, + "recipeoftheday": 38727, + "recipes": 9243, + "recipi": 10136, + "recipient": 13703, + "recipients": 18940, + "recipro": 41789, + "recital": 23457, + "recite": 48824, + "reck": 11715, + "reckless": 26284, + "reckon": 23854, + "recl": 42277, + "reclaim": 35969, + "reclaimed": 32648, + "reco": 2535, + "reco": 46038, + "recogn": 6343, + "recogni": 5329, + "recognise": 19824, + "recognised": 20986, + "recognising": 48423, + "recognition": 9415, + "recognizable": 47240, + "recognize": 10905, + "recognized": 9929, + "recognizes": 26909, + "recognizing": 19666, + "recomm": 4540, + "recommend": 11628, + "recommend": 8942, + "recommendation": 20118, + "recommendations": 16516, + "recommended": 11100, + "recommending": 44301, + "recommends": 22940, + "recon": 15371, + "recon": 28996, + "reconciliation": 26451, + "reconstruction": 24955, + "recor": 1723, + "record": 21328, + "record": 2717, + "recorded": 9392, + "recorder": 26747, + "recording": 48237, + "recording": 6942, + "recordings": 19715, + "records": 4529, + "recover": 16785, + "recovered": 16444, + "recovering": 19005, + "recovers": 47935, + "recovery": 6591, + "recre": 22148, + "recreate": 29775, + "recreated": 40888, + "recreating": 48224, + "recreation": 17331, + "recreational": 24329, + "recru": 4745, + "recruit": 9011, + "recruit": 15585, + "recruited": 36518, + "recruiter": 43120, + "recruiters": 46542, + "recruiting": 10533, + "recruitment": 10541, + "recruits": 22647, + "recs": 33069, + "rectan": 43041, + "rectangular": 43321, + "rector": 41585, + "recu": 26798, + "recur": 19983, + "recurring": 35912, + "recy": 6790, + "recycla": 40659, + "recyclable": 48907, + "recycle": 19366, + "recycled": 16829, + "recycling": 12566, + "red": 1893, + "red": 736, + "redbubble": 46137, + "redbull": 29483, + "redbull": 29219, + "redcarpet": 32259, + "redcross": 30659, + "redd": 22149, + "redd": 40618, + "redding": 41061, + "reddish": 43383, + "reddit": 15226, + "reddy": 23028, + "rede": 10913, + "redeem": 37449, + "redefining": 46352, + "redemption": 20233, + "redesign": 24188, + "redesigned": 33111, + "redevelopment": 30322, + "redhead": 36267, + "redi": 7976, + "redman": 44753, + "redmond": 39627, + "rednation": 28180, + "rednationrising": 28262, + "redneck": 39105, + "redness": 22626, + "redo": 42524, + "redon": 48506, + "redro": 37722, + "reds": 11221, + "redskins": 19023, + "redsox": 19144, + "reduc": 5015, + "reduce": 6604, + "reduced": 10821, + "reduces": 20539, + "reducing": 13836, + "reduction": 12219, + "reductions": 48263, + "redux": 43014, + "redvelvet": 41845, + "redwings": 31058, + "redwood": 31748, + "ree": 9282, + "ree": 5813, + "reebok": 26734, + "reece": 30457, + "reed": 26209, + "reed": 10435, + "reedus": 32865, + "reef": 46557, + "reef": 15624, + "reefs": 34459, + "reel": 34467, + "reel": 17166, + "reels": 48127, + "reem": 48891, + "reen": 21638, + "reen": 23679, + "rees": 18314, + "reese": 20929, + "reeves": 23060, + "ref": 4067, + "ref": 9591, + "refe": 5624, + "refer": 18425, + "refer": 22325, + "referee": 20398, + "referees": 45583, + "referen": 13535, + "reference": 10214, + "references": 24009, + "referendum": 16732, + "referr": 47784, + "referral": 30219, + "referred": 22969, + "referring": 29797, + "refers": 30069, + "refill": 37859, + "refin": 13455, + "refined": 26098, + "refinery": 31393, + "refining": 48406, + "reflec": 4608, + "reflect": 13373, + "reflected": 28732, + 
"reflecting": 19700, + "reflection": 11884, + "reflections": 16647, + "reflective": 27008, + "reflects": 15821, + "reflex": 45756, + "reflex": 36050, + "reform": 45678, + "reform": 8875, + "reformation": 45119, + "reformed": 40880, + "reforms": 19274, + "refr": 34850, + "refre": 11995, + "refresh": 17836, + "refresh": 23288, + "refreshed": 35925, + "refresher": 41481, + "refreshing": 14159, + "refreshments": 31127, + "refriger": 21076, + "refrigerator": 36662, + "refs": 35595, + "refu": 3545, + "refuge": 5638, + "refuge": 17432, + "refugee": 11556, + "refugees": 42687, + "refugees": 8316, + "refund": 28899, + "refur": 15519, + "refurbi": 18259, + "refurbished": 26190, + "refurbishment": 35803, + "refusal": 46547, + "refuse": 16412, + "refused": 17190, + "refuses": 20085, + "refusing": 26704, + "reg": 5472, + "reg": 12353, + "regain": 37510, + "regal": 31512, + "regal": 25028, + "regan": 34062, + "regar": 5881, + "regard": 21801, + "regarded": 32017, + "regarding": 8493, + "regardless": 17220, + "regards": 23079, + "regatta": 26316, + "regen": 46545, + "regency": 29341, + "regeneration": 29257, + "regent": 30455, + "regents": 46710, + "regg": 12757, + "reggae": 37821, + "reggae": 15214, + "reggie": 21872, + "regi": 1608, + "regime": 11378, + "regiment": 18603, + "regin": 23287, + "regina": 16841, + "region": 16542, + "region": 4341, + "regional": 5552, + "regionals": 26043, + "regions": 14530, + "regis": 28094, + "register": 3967, + "registered": 10254, + "registering": 33510, + "registr": 29193, + "registration": 7302, + "registrations": 38423, + "registry": 30020, + "rego": 47351, + "regram": 30329, + "regrann": 48802, + "regre": 8627, + "regression": 43733, + "regret": 14374, + "regrets": 23231, + "regu": 3411, + "regui": 46722, + "regul": 11847, + "regular": 14882, + "regular": 6307, + "regularly": 17263, + "regulat": 14575, + "regulate": 33494, + "regulated": 31384, + "regulating": 48156, + "regulation": 14267, + "regulations": 16654, + "regulator": 30364, + "regulators": 35837, + "regulatory": 17717, + "reh": 21492, + "reha": 10193, + "rehab": 16973, + "rehabil": 17930, + "rehabilitation": 21042, + "rehear": 7273, + "rehearsal": 11482, + "rehearsals": 17977, + "rehearsing": 23125, + "rehman": 39206, + "rei": 15343, + "rei": 26033, + "reic": 41230, + "reich": 48589, + "reich": 28929, + "reid": 45125, + "reid": 11744, + "reig": 13092, + "reign": 41419, + "reign": 14827, + "reigning": 28409, + "reigns": 21217, + "reiki": 46960, + "reilly": 28120, + "reim": 35421, + "reimagined": 46799, + "reimbur": 39857, + "rein": 9240, + "rein": 45009, + "reina": 43847, + "reinde": 23810, + "reindeer": 25072, + "reinfor": 48161, + "reinforced": 41909, + "reinst": 33969, + "reinvent": 38171, + "reissue": 34042, + "reiter": 35394, + "rejec": 9958, + "reject": 22435, + "rejected": 17505, + "rejection": 32264, + "rejects": 23155, + "rejo": 20150, + "rejoice": 24712, + "rejuven": 26332, + "rek": 47542, + "rek": 19201, + "rel": 1825, + "rel": 5233, + "rela": 4362, + "reland": 15220, + "relat": 27192, + "relatable": 31010, + "relate": 17520, + "related": 5880, + "relates": 36064, + "relating": 27373, + "relation": 4561, + "relation": 16207, + "relations": 10100, + "relationship": 47239, + "relationship": 5837, + "relationships": 10610, + "relative": 17265, + "relatively": 18351, + "relatives": 21981, + "relax": 6777, + "relax": 9035, + "relaxation": 22194, + "relaxed": 18999, + "relaxing": 10256, + "relay": 12403, + "relays": 28404, + "rele": 1602, + "release": 29100, + "release": 2706, + "released": 3410, + 
"releases": 7393, + "releasethe": 44008, + "releasing": 10321, + "releg": 23378, + "relegated": 45884, + "relegation": 35040, + "relent": 22213, + "relentless": 27207, + "relessly": 33927, + "relev": 9349, + "relevance": 31400, + "relevant": 10568, + "reli": 2674, + "reliability": 27220, + "reliable": 13714, + "reliance": 27727, + "relic": 27802, + "relics": 43208, + "relief": 7518, + "relies": 41579, + "relieve": 28623, + "relieved": 36597, + "religi": 4940, + "religion": 8803, + "religions": 31189, + "religious": 8289, + "relish": 35550, + "relive": 23939, + "reliving": 47558, + "rell": 28802, + "rell": 7127, + "rella": 9952, + "relle": 31390, + "reloaded": 38908, + "relocated": 46791, + "relocation": 39198, + "rels": 23320, + "relu": 32058, + "reluct": 32549, + "reluctant": 45552, + "rely": 4158, + "relying": 42168, + "rem": 15098, + "rem": 21637, + "rema": 4569, + "remain": 29144, + "remain": 6415, + "remainder": 41672, + "remained": 23714, + "remaining": 11392, + "remains": 6807, + "remake": 16234, + "remark": 11136, + "remarkable": 12404, + "remarkably": 39087, + "remarks": 15001, + "remastered": 24932, + "rematch": 26473, + "rembrandt": 45972, + "reme": 20071, + "remedi": 18442, + "remedies": 25581, + "remedy": 25794, + "remem": 7966, + "rememb": 7062, + "remember": 22045, + "remember": 2195, + "remembered": 11763, + "remembering": 8135, + "remembers": 12551, + "remembrance": 40321, + "remembrance": 15860, + "remembranceday": 48333, + "rement": 7173, + "rements": 12667, + "remi": 41693, + "remin": 3216, + "remind": 9868, + "reminded": 12309, + "reminder": 5565, + "reminders": 34121, + "reminding": 19976, + "reminds": 8303, + "remington": 43527, + "reminis": 17723, + "reminiscent": 41704, + "reminiscing": 32552, + "remix": 8519, + "remixes": 31011, + "remn": 29127, + "remnants": 39032, + "remo": 4064, + "remo": 33259, + "remodel": 34159, + "remodel": 37495, + "remodeling": 41432, + "remote": 47163, + "remote": 9687, + "remotely": 32375, + "removable": 44095, + "removal": 13679, + "remove": 9709, + "removed": 10289, + "remover": 44267, + "removes": 29018, + "removing": 18504, + "remy": 30434, + "ren": 737, + "ren": 2596, + "rena": 12591, + "renais": 15409, + "renaissance": 16007, + "renal": 36096, + "renamed": 31535, + "renault": 17600, + "rence": 19245, + "rence": 1553, + "rences": 8545, + "rend": 33932, + "rend": 22851, + "render": 39752, + "render": 13024, + "rendered": 23652, + "rendering": 21339, + "renders": 39419, + "rendez": 43293, + "rendezvous": 45644, + "rendition": 28891, + "rendon": 46272, + "rendous": 49403, + "rends": 38842, + "rene": 15438, + "rene": 12597, + "renee": 23480, + "reneg": 29909, + "renegade": 41229, + "renergy": 37151, + "renew": 6645, + "renew": 22015, + "renewable": 31269, + "renewable": 15941, + "renewableenergy": 33357, + "renewables": 21619, + "renewal": 21270, + "renewed": 20524, + "renfre": 45043, + "reng": 36795, + "reno": 11520, + "reno": 12831, + "renov": 9984, + "renovated": 23839, + "renovation": 17121, + "renovations": 31311, + "renowned": 14727, + "rens": 18183, + "renshaw": 44445, + "rent": 17377, + "rent": 1609, + "rental": 12193, + "rentals": 24105, + "rented": 35932, + "rential": 31692, + "renting": 37662, + "rently": 2615, + "rents": 31109, + "reo": 15963, + "reo": 26854, + "reon": 15761, + "reopen": 26883, + "reopened": 32868, + "reopening": 36663, + "reopens": 40644, + "rep": 4229, + "rep": 6487, + "repair": 8419, + "repaired": 32953, + "repairing": 38534, + "repairs": 16297, + "repar": 34065, + "repe": 5785, + "repeal": 42622, + 
"repeal": 23938, + "repeat": 10192, + "repeated": 27904, + "repeatedly": 26630, + "repeating": 33834, + "repeats": 39158, + "repell": 46235, + "repent": 47261, + "reper": 29085, + "repet": 38533, + "repl": 13047, + "replac": 6069, + "replace": 9466, + "replaceable": 47762, + "replaced": 13200, + "replacement": 10835, + "replaces": 27781, + "replacing": 18647, + "replay": 16875, + "repleni": 44839, + "replic": 21651, + "replica": 18125, + "replied": 24238, + "replies": 18808, + "reply": 8965, + "replying": 47599, + "repor": 2628, + "report": 2417, + "reported": 7598, + "reportedly": 10953, + "reporter": 11019, + "reporters": 18454, + "reporting": 9218, + "reports": 4908, + "reposit": 41276, + "repository": 46977, + "repost": 33147, + "repost": 7217, + "repostapp": 38388, + "reposting": 20223, + "reppin": 19163, + "repping": 22574, + "repre": 3397, + "represent": 8293, + "represent": 8406, + "representation": 13520, + "representative": 13175, + "representatives": 15591, + "represented": 12299, + "representing": 7561, + "represents": 14433, + "repri": 31854, + "reproduction": 35714, + "reproductive": 25522, + "reps": 14265, + "reptile": 36938, + "reptiles": 38679, + "republic": 6376, + "republic": 7185, + "republican": 9842, + "republicans": 12384, + "repur": 41852, + "req": 42411, + "requ": 10664, + "reque": 9539, + "request": 7813, + "requested": 16199, + "requesting": 33245, + "requests": 17087, + "requi": 4863, + "requiem": 40316, + "require": 14437, + "required": 8500, + "requirement": 27146, + "requirements": 12860, + "requires": 13396, + "requiring": 33425, + "requis": 42602, + "rer": 41295, + "rer": 3407, + "rera": 14301, + "rero": 21860, + "rers": 18869, + "res": 4466, + "res": 934, + "resc": 3956, + "rescheduled": 43553, + "rescu": 8618, + "rescue": 28567, + "rescue": 5718, + "rescued": 11919, + "rescues": 32439, + "rescuing": 43770, + "rese": 13000, + "resear": 6090, + "research": 25694, + "research": 2379, + "researched": 42733, + "researcher": 18334, + "researchers": 9522, + "researching": 24544, + "reseller": 35391, + "resemb": 16916, + "resemblance": 26856, + "resemble": 37230, + "resembles": 35417, + "reser": 16420, + "reserv": 11906, + "reservation": 20289, + "reservations": 19307, + "reserve": 6911, + "reserved": 19796, + "reserves": 19705, + "reservoir": 20574, + "reset": 26250, + "resh": 47432, + "reshi": 39435, + "resi": 2152, + "residen": 22311, + "residence": 11672, + "residences": 38855, + "residency": 18545, + "resident": 9016, + "residente": 44637, + "residentevil": 48393, + "residential": 11002, + "residents": 6008, + "resign": 23584, + "resignation": 24779, + "resigned": 31014, + "resigns": 29738, + "resil": 10932, + "resili": 39212, + "resilience": 15271, + "resilient": 24694, + "resin": 24156, + "resist": 37345, + "resist": 9587, + "resistance": 7392, + "resistant": 17542, + "resisting": 43679, + "resolution": 9977, + "resolutions": 26816, + "resolve": 20787, + "resolved": 28807, + "reson": 18092, + "resonance": 42310, + "resort": 6594, + "resorts": 18839, + "resource": 43729, + "resource": 9760, + "resources": 6723, + "respec": 7466, + "respect": 31411, + "respect": 4916, + "respected": 19126, + "respectful": 24379, + "respecting": 36172, + "respective": 25817, + "respectively": 28794, + "respects": 23553, + "respir": 20771, + "respiratory": 24483, + "respon": 2421, + "respond": 12355, + "responded": 21121, + "respondents": 49253, + "responders": 25155, + "responding": 18037, + "responds": 17436, + "response": 5399, + "responses": 19006, + "responsi": 5490, + 
"responsibilities": 30375, + "responsibility": 11272, + "responsible": 8936, + "responsibly": 33675, + "responsive": 21544, + "ress": 34651, + "ress": 13629, + "resso": 15133, + "rest": 10974, + "rest": 2539, + "restart": 37378, + "restaur": 3775, + "restaurant": 41930, + "restaurant": 4489, + "restaurants": 11714, + "rested": 46020, + "resting": 18044, + "restless": 36724, + "restling": 30076, + "resto": 11118, + "resto": 41666, + "restock": 34060, + "restocked": 36966, + "restor": 8984, + "restoration": 11989, + "restorative": 46509, + "restore": 14008, + "restored": 14238, + "restoring": 24406, + "restra": 25424, + "restric": 11036, + "restricted": 27197, + "restriction": 44282, + "restrictions": 19884, + "restroom": 43423, + "restructuring": 43260, + "rests": 33775, + "resu": 10095, + "resul": 2655, + "result": 5659, + "resulted": 26449, + "resulting": 24581, + "results": 3790, + "resume": 15077, + "resumes": 30268, + "resur": 14865, + "resurg": 45962, + "resurgence": 47692, + "resurrec": 18487, + "resurrection": 25811, + "resusc": 47523, + "ret": 20500, + "ret": 10048, + "reta": 20153, + "retail": 14910, + "retail": 6455, + "retailer": 22549, + "retailers": 19418, + "retain": 24430, + "retained": 42737, + "retaining": 35571, + "retains": 42583, + "retali": 33101, + "retar": 29964, + "retarded": 44111, + "retention": 26247, + "rethink": 29078, + "rethinking": 42951, + "reti": 4721, + "retin": 31270, + "retina": 36919, + "retire": 18846, + "retired": 11477, + "retirement": 9205, + "retires": 29060, + "retiring": 21200, + "retrac": 32735, + "retreat": 11210, + "retri": 16918, + "retriever": 28394, + "retro": 6535, + "retro": 7755, + "retrogamer": 47220, + "retrogaming": 11316, + "retrospective": 27105, + "rett": 41082, + "rett": 8425, + "rette": 33066, + "return": 43042, + "return": 3458, + "returned": 10476, + "returning": 9290, + "returns": 5020, + "retwee": 48190, + "retweet": 3195, + "retweeted": 12705, + "retweeting": 32345, + "retweets": 10160, + "rety": 41550, + "reu": 20255, + "reu": 40371, + "reuben": 40450, + "reunion": 10247, + "reunite": 26179, + "reunited": 13516, + "reusable": 30395, + "reuse": 26535, + "reut": 15210, + "reuters": 15569, + "rev": 8424, + "rev": 11789, + "revamp": 29819, + "revamped": 36420, + "revan": 45277, + "reve": 3115, + "reveal": 8052, + "revealed": 7171, + "revealing": 21321, + "reveals": 6621, + "revel": 14133, + "revelation": 24053, + "revelations": 36163, + "reven": 10171, + "revenge": 12717, + "revenue": 10637, + "revenues": 33348, + "rever": 14829, + "rever": 41913, + "revere": 44187, + "reverend": 34407, + "revers": 20726, + "reversal": 33367, + "reverse": 12812, + "reversed": 42485, + "reversi": 31601, + "reversible": 34212, + "revi": 8317, + "review": 2268, + "reviewed": 16678, + "reviewer": 36409, + "reviewers": 48195, + "reviewing": 20458, + "reviews": 7227, + "revise": 46801, + "revised": 22806, + "revising": 46882, + "revision": 20335, + "revisit": 26568, + "revisited": 34302, + "revisiting": 33144, + "revit": 26367, + "revitalization": 46923, + "revival": 14142, + "revive": 26450, + "revived": 42912, + "revo": 28660, + "revol": 13447, + "revolt": 31697, + "revolu": 4900, + "revolution": 17699, + "revolution": 6644, + "revolutionary": 14734, + "revolver": 38747, + "revolving": 47230, + "revs": 49286, + "revue": 43428, + "rew": 37564, + "rewar": 15857, + "reward": 11223, + "rewarded": 27163, + "rewarding": 23351, + "rewards": 15235, + "rewatch": 35610, + "rewatching": 41287, + "rewind": 26867, + "rewrite": 45218, + "rex": 13002, + "rex": 
10904, + "rexperience": 33924, + "rey": 9681, + "rey": 4517, + "reyes": 18255, + "reykja": 47571, + "reyn": 11998, + "reynolds": 14309, + "reys": 48284, + "rez": 27597, + "rez": 15192, + "reza": 35888, + "rf": 35529, + "rf": 16368, + "rfc": 19003, + "rfid": 40204, + "rg": 33055, + "rg": 14897, + "rgb": 36128, + "rgv": 33685, + "rh": 8745, + "rh": 22404, + "rha": 19473, + "rhapso": 32532, + "rhapsody": 35774, + "rhe": 9186, + "rhea": 28612, + "rhetor": 24359, + "rhetoric": 29985, + "rhett": 42984, + "rheu": 42953, + "rhi": 21212, + "rhin": 12269, + "rhine": 22863, + "rhine": 44833, + "rhinestone": 30450, + "rhino": 41744, + "rhino": 20056, + "rhinos": 30671, + "rho": 7637, + "rhode": 39302, + "rhode": 27907, + "rhodes": 17785, + "rhon": 25882, + "rhonda": 46100, + "rhp": 27199, + "rhs": 24551, + "rhu": 23897, + "rhubarb": 30213, + "rhy": 7740, + "rhyme": 37356, + "rhymes": 33143, + "rhys": 28647, + "rhyth": 27069, + "rhythm": 16172, + "rhythmic": 46386, + "rhythms": 40872, + "ri": 553, + "ri": 2574, + "ria": 3650, + "rial": 15200, + "rian": 7788, + "rib": 44634, + "rib": 18298, + "riba": 44992, + "ribb": 10081, + "ribbon": 12114, + "ribbons": 35271, + "ribe": 46115, + "ribs": 17519, + "ric": 920, + "ric": 4798, + "rica": 14230, + "rical": 18109, + "rican": 30958, + "ricardo": 23140, + "ricci": 35783, + "ricciardo": 49282, + "rice": 36362, + "rice": 4741, + "rich": 5223, + "rich": 4021, + "richar": 9350, + "richard": 9080, + "richard": 4470, + "richards": 11372, + "richardson": 15984, + "riche": 23286, + "richer": 34138, + "riches": 37093, + "richest": 25572, + "richi": 38934, + "richie": 19797, + "richland": 43079, + "richmond": 34143, + "richmond": 11292, + "richter": 37591, + "rick": 6237, + "rick": 3064, + "ricket": 46161, + "ricket": 23671, + "ricks": 23111, + "ricky": 19188, + "ricky": 12814, + "rico": 37962, + "rico": 11362, + "ricotta": 38473, + "rics": 7353, + "ricul": 6980, + "rid": 18103, + "rid": 9874, + "ridd": 21990, + "ridden": 32025, + "riddle": 31839, + "ride": 15816, + "ride": 2994, + "rider": 31056, + "rider": 9707, + "riders": 10826, + "rides": 11308, + "ridg": 42646, + "ridge": 16580, + "ridge": 6352, + "ridic": 9624, + "ridiculous": 12659, + "ridiculously": 25661, + "ridin": 47869, + "riding": 6765, + "ridley": 27883, + "rie": 14824, + "rie": 5322, + "ried": 7552, + "riel": 26696, + "rien": 35237, + "rier": 40714, + "rier": 13336, + "ries": 28179, + "ries": 3059, + "riesling": 36372, + "rif": 7044, + "riff": 30359, + "rifle": 15354, + "rifles": 25678, + "rift": 26681, + "rig": 18462, + "rig": 13871, + "riga": 36626, + "rigged": 35897, + "rigging": 38160, + "riggs": 40328, + "righ": 15391, + "right": 13341, + "right": 1155, + "righte": 20762, + "righteous": 28169, + "righteousness": 42481, + "rightful": 42601, + "rightly": 42669, + "rights": 3336, + "rigid": 43138, + "rigor": 36788, + "rigorous": 41654, + "rigs": 42893, + "rihanna": 13744, + "rij": 41097, + "rik": 31136, + "rik": 27832, + "rika": 28580, + "ril": 12270, + "ril": 2388, + "riley": 35056, + "riley": 12260, + "rill": 23705, + "rilla": 43956, + "rilla": 18685, + "rim": 28147, + "rim": 12199, + "rime": 27064, + "rimin": 11527, + "rimo": 47817, + "rims": 34327, + "rin": 5859, + "rin": 11739, + "rina": 12869, + "rine": 24952, + "ring": 8318, + "ring": 2540, + "ringed": 44712, + "ringer": 35761, + "ringing": 26035, + "ringo": 38845, + "rings": 5751, + "rington": 12455, + "rink": 21497, + "rinka": 47316, + "rino": 47188, + "rinse": 48320, + "rio": 15681, + "rio": 5782, + "rion": 31623, + "rion": 34046, + "rios": 
32814, + "riot": 32636, + "riot": 14218, + "riots": 24844, + "rious": 6340, + "rip": 10353, + "rip": 4243, + "ripe": 22832, + "ripley": 41589, + "ripp": 25276, + "ripped": 17815, + "ripper": 35347, + "ripping": 29126, + "ripple": 24825, + "rips": 30182, + "rir": 36792, + "ris": 6108, + "ris": 1999, + "rise": 13641, + "rise": 3151, + "risen": 23653, + "risers": 44983, + "rises": 13362, + "riseup": 35760, + "rish": 18378, + "rish": 18927, + "rishi": 48434, + "rising": 30452, + "rising": 5448, + "risis": 37998, + "risk": 27967, + "risk": 4213, + "risking": 48155, + "risks": 12474, + "risky": 27630, + "risotto": 31471, + "rist": 40610, + "rit": 5156, + "rit": 17333, + "rita": 16178, + "ritchie": 30997, + "rite": 39318, + "rite": 18429, + "rites": 36160, + "rith": 48169, + "rith": 48850, + "riti": 32904, + "rito": 19379, + "ritos": 33507, + "ritt": 26092, + "ritter": 34854, + "ritu": 13391, + "ritual": 19712, + "rituals": 31145, + "ritz": 39151, + "ritz": 25627, + "rium": 33884, + "riv": 25113, + "rival": 13412, + "rival": 15629, + "rivalry": 19511, + "rivals": 15135, + "rive": 27588, + "rive": 34917, + "river": 5239, + "river": 2473, + "rivera": 18275, + "riverdale": 28304, + "riverfront": 44439, + "rivers": 10723, + "riverside": 15809, + "riveting": 44024, + "riviera": 25851, + "rix": 43407, + "rix": 9483, + "riya": 36908, + "riyad": 31564, + "riyadh": 33577, + "riz": 18426, + "riz": 35411, + "rizal": 41555, + "rizio": 40191, + "rizz": 34826, + "rizzo": 49076, + "rj": 26016, + "rj": 20949, + "rk": 38725, + "rk": 21422, + "rl": 18041, + "rl": 14590, + "rlly": 43222, + "rly": 25954, + "rm": 20202, + "rm": 8431, + "rmb": 49097, + "rms": 40529, + "rn": 13206, + "rn": 7666, + "rna": 24566, + "rnb": 31556, + "rnc": 35309, + "rnli": 29748, + "ro": 532, + "ro": 2795, + "roa": 8313, + "roach": 31073, + "road": 4370, + "road": 1759, + "roadhouse": 47891, + "roadmap": 30111, + "roads": 6189, + "roadsafety": 39992, + "roadshow": 21168, + "roadside": 26928, + "roadster": 28920, + "roadto": 24681, + "roadtrip": 15094, + "roadway": 42744, + "roam": 34045, + "roaming": 29240, + "roano": 34184, + "roanoke": 36587, + "roar": 34193, + "roar": 18483, + "roaring": 26428, + "roast": 11404, + "roasted": 10479, + "roasting": 32228, + "rob": 2668, + "rob": 6442, + "robb": 14059, + "robb": 39673, + "robbed": 24163, + "robber": 35545, + "robbers": 40852, + "robbery": 16393, + "robbi": 44898, + "robbie": 37200, + "robbie": 15970, + "robbing": 47569, + "robbins": 23461, + "robby": 44128, + "robe": 23116, + "rober": 4532, + "robert": 8811, + "robert": 3929, + "roberta": 43373, + "roberto": 42645, + "roberto": 16227, + "roberts": 10366, + "robertson": 17643, + "robes": 29304, + "robi": 16743, + "robin": 6681, + "robin": 7988, + "robins": 35502, + "robinson": 8523, + "robles": 47646, + "roblo": 27481, + "roblox": 37798, + "robo": 4672, + "robo": 36057, + "robot": 46089, + "robot": 8797, + "robotic": 23975, + "robotics": 13546, + "robots": 13473, + "robson": 31113, + "robust": 22780, + "robyn": 34533, + "roc": 3268, + "roc": 13776, + "rocco": 30009, + "roch": 23788, + "rochdale": 41880, + "roche": 31776, + "rochelle": 40161, + "rochester": 18057, + "rock": 2640, + "rock": 2172, + "rockab": 39353, + "rockabilly": 45019, + "rocke": 19914, + "rocked": 16116, + "rockefeller": 35476, + "rocker": 29008, + "rockers": 32338, + "rocket": 25435, + "rocket": 8383, + "rockets": 13292, + "rockford": 41039, + "rockies": 20621, + "rockin": 12073, + "rocking": 7081, + "rockn": 24442, + "rocknroll": 27840, + "rocks": 6135, + "rockstar": 
23603, + "rockstar": 18000, + "rockstargames": 27516, + "rockstars": 46639, + "rockthe": 49363, + "rockwell": 34747, + "rocky": 33481, + "rocky": 9648, + "rod": 9712, + "rod": 8291, + "roddy": 42332, + "rode": 18449, + "rodeo": 18250, + "rodgers": 17612, + "rodi": 49100, + "rodney": 21753, + "rodri": 11053, + "rodrigo": 33944, + "rodriguez": 14057, + "rods": 28618, + "roe": 27671, + "roe": 9996, + "rof": 33029, + "rofl": 48228, + "roft": 45212, + "rog": 34269, + "rog": 34017, + "rogen": 23380, + "roger": 13929, + "roger": 7735, + "rogerfederer": 40182, + "rogers": 10661, + "rogue": 32575, + "rogue": 15162, + "roh": 14933, + "roh": 29840, + "rohan": 39848, + "rohing": 23600, + "rohingya": 26146, + "rohit": 44649, + "rohit": 24299, + "roi": 21877, + "rok": 36807, + "rol": 3393, + "rol": 7818, + "roland": 33713, + "roland": 19569, + "role": 18485, + "role": 3414, + "roles": 11871, + "rolex": 21093, + "rolf": 48606, + "roll": 4711, + "roll": 3341, + "rolled": 11982, + "roller": 21034, + "roller": 12342, + "rollercoaster": 38248, + "rollers": 36941, + "rollin": 27545, + "rolling": 24250, + "rolling": 6347, + "rollingstones": 41309, + "rollins": 27724, + "rollout": 47710, + "rollover": 39214, + "rolls": 8614, + "rolltide": 28101, + "rom": 11377, + "rom": 19205, + "roma": 44134, + "roma": 11631, + "romain": 48897, + "roman": 4416, + "roman": 7370, + "romance": 7215, + "romania": 15884, + "romanian": 30866, + "romano": 38409, + "romans": 23066, + "romantic": 41457, + "romantic": 8821, + "rome": 9406, + "rome": 5243, + "romeo": 14429, + "romero": 23694, + "romney": 19287, + "romo": 32248, + "romper": 43699, + "ron": 2393, + "ron": 3372, + "rona": 42385, + "ronal": 46194, + "ronald": 15683, + "ronaldo": 13463, + "ronan": 34971, + "rond": 31935, + "ronda": 37436, + "rondo": 43756, + "rone": 48082, + "rone": 32763, + "roni": 47234, + "ronnie": 45257, + "ronnie": 16421, + "rons": 19536, + "ront": 48881, + "roo": 1249, + "roo": 31227, + "rood": 38007, + "roof": 9120, + "roof": 6449, + "roofing": 24415, + "roofs": 34635, + "rooftop": 16319, + "rook": 35918, + "rookie": 9771, + "rookies": 31917, + "room": 8845, + "room": 1530, + "roomie": 36851, + "roommate": 19825, + "roommates": 37323, + "rooms": 6328, + "rooney": 17712, + "roos": 32938, + "roosevel": 17644, + "roosevelt": 18488, + "rooster": 46263, + "rooster": 30926, + "roosters": 43693, + "root": 25930, + "root": 9728, + "rooted": 30428, + "rooting": 25523, + "roots": 8084, + "rop": 43401, + "rope": 9953, + "ropes": 30506, + "ror": 8668, + "ror": 2843, + "rors": 12072, + "rory": 42804, + "rory": 17813, + "ros": 5288, + "ros": 6930, + "rosa": 14393, + "rosal": 30397, + "rosario": 33640, + "rosary": 33098, + "rosberg": 46037, + "rose": 6146, + "rose": 3568, + "roseanne": 47528, + "rosel": 33616, + "rosemary": 19472, + "rosen": 13214, + "rosen": 36424, + "rosenberg": 43558, + "rosenthal": 46990, + "roses": 9061, + "rosetta": 43800, + "rosewood": 38686, + "rosie": 43049, + "rosie": 16888, + "ross": 8801, + "ross": 2158, + "rosse": 11602, + "rossi": 24817, + "rosso": 33023, + "roster": 12487, + "roswell": 45116, + "rosy": 46705, + "rosé": 28006, + "rot": 10055, + "rot": 9643, + "rotar": 45959, + "rotary": 14654, + "rotating": 32265, + "rotation": 18089, + "rotc": 32252, + "roth": 17741, + "roth": 19139, + "rother": 23174, + "rotherham": 37687, + "rothschild": 45089, + "roti": 46940, + "roto": 34698, + "rotor": 42991, + "rots": 16642, + "rott": 34806, + "rotten": 24324, + "rotter": 22614, + "rotterdam": 23422, + "rotun": 42970, + "rou": 2964, + "rou": 
34783, + "roud": 28375, + "rouge": 16209, + "rough": 11699, + "rough": 8511, + "roughly": 21910, + "roughs": 37598, + "rouhani": 39912, + "roulette": 39930, + "roun": 5602, + "round": 9403, + "round": 2522, + "roundabout": 29953, + "rounded": 26973, + "rounder": 37024, + "rounding": 40208, + "rounds": 11242, + "roundtable": 19386, + "roundup": 17503, + "roup": 29220, + "rourke": 38753, + "rous": 33645, + "rous": 34531, + "rousey": 46267, + "rout": 7502, + "rout": 41778, + "route": 5261, + "router": 29962, + "routes": 14923, + "routine": 12319, + "routines": 44074, + "routing": 44086, + "roux": 43416, + "rov": 23971, + "rove": 30130, + "rover": 12776, + "rovers": 16373, + "row": 5275, + "row": 1044, + "rowan": 26240, + "rowdy": 32141, + "rowe": 28323, + "rowed": 22615, + "rower": 43345, + "rowers": 41806, + "rowing": 12807, + "rowland": 33037, + "rowley": 48793, + "rowling": 29371, + "rown": 22287, + "rown": 25060, + "rows": 9409, + "rox": 14111, + "rox": 41033, + "roxy": 28093, + "roy": 2128, + "roy": 6354, + "royal": 6691, + "royal": 3853, + "royale": 20630, + "royalnavy": 41545, + "royals": 13335, + "royalties": 48660, + "royalty": 18296, + "royalwedding": 27461, + "royce": 18444, + "royd": 41476, + "royo": 39357, + "roz": 28989, + "roz": 37250, + "rp": 17305, + "rp": 8174, + "rpa": 41872, + "rpg": 12445, + "rpm": 23715, + "rps": 49215, + "rr": 5311, + "rr": 9126, + "rrp": 36967, + "rrr": 18267, + "rrrr": 25561, + "rrrr": 34444, + "rs": 6978, + "rs": 1724, + "rsa": 29437, + "rsc": 48524, + "rsd": 34426, + "rsi": 39046, + "rsl": 44752, + "rsp": 16381, + "rspb": 38508, + "rspb": 36727, + "rspca": 45643, + "rss": 46466, + "rss": 22350, + "rstats": 38700, + "rsvp": 9774, + "rt": 8959, + "rt": 8991, + "rtc": 31648, + "rte": 33822, + "rte": 23322, + "rtg": 22028, + "rti": 47549, + "rtr": 43999, + "rts": 8496, + "rtw": 34673, + "ru": 681, + "ru": 13735, + "rub": 15862, + "rub": 22586, + "rubb": 19597, + "rubbed": 45239, + "rubber": 31131, + "rubber": 11331, + "rubbing": 41262, + "rubbish": 21108, + "rubble": 42230, + "ruben": 44058, + "ruben": 29722, + "rubi": 27856, + "rubin": 34128, + "rubio": 24244, + "rubs": 43422, + "ruby": 24552, + "ruby": 11493, + "ruck": 27449, + "rucker": 45402, + "rud": 35256, + "rudd": 31836, + "rude": 16548, + "rudi": 48360, + "rudol": 40927, + "rudolf": 46835, + "rudolph": 30119, + "rudy": 38226, + "rudy": 22131, + "rue": 38024, + "rue": 19276, + "rufc": 45084, + "ruff": 28177, + "ruff": 30304, + "rufus": 39322, + "rug": 4217, + "rug": 19220, + "rugby": 15091, + "rugby": 4964, + "rugbyleague": 44419, + "ruger": 48655, + "rugged": 25225, + "rugs": 29946, + "rui": 46974, + "ruin": 16256, + "ruined": 17231, + "ruining": 29952, + "ruins": 16094, + "ruiz": 27873, + "ruk": 46628, + "rukh": 43075, + "rukh": 27631, + "rule": 31643, + "rule": 6175, + "ruled": 16324, + "ruler": 26286, + "rulers": 45328, + "rules": 5272, + "ruling": 14690, + "rum": 9223, + "rum": 11233, + "rumb": 42432, + "rumble": 18900, + "rumi": 31428, + "rumor": 22254, + "rumored": 36694, + "rumors": 16160, + "rumour": 34296, + "rumours": 20716, + "rump": 29366, + "run": 1639, + "run": 1934, + "runaway": 28851, + "runchat": 25838, + "rundown": 41100, + "rune": 33882, + "rune": 49244, + "runner": 37370, + "runner": 7913, + "runners": 10571, + "runnin": 43130, + "running": 24451, + "running": 2761, + "runoff": 38564, + "runs": 5586, + "runway": 13927, + "rup": 7996, + "rup": 14980, + "rupaul": 44211, + "rupee": 43916, + "rupees": 44110, + "rupert": 25625, + "rupt": 23055, + "ruption": 35403, + "rural": 
28801, + "rural": 8737, + "rus": 35811, + "rus": 5998, + "rush": 12148, + "rush": 6973, + "rushed": 28104, + "rusher": 48745, + "rushes": 47217, + "rushing": 20284, + "russ": 6285, + "russ": 20764, + "russell": 26122, + "russell": 8150, + "russi": 2600, + "russia": 4018, + "russian": 30731, + "russian": 4868, + "russians": 25413, + "russo": 30679, + "rust": 28682, + "rust": 14212, + "rustic": 19822, + "rusty": 43966, + "rusty": 22646, + "rut": 14973, + "rut": 39102, + "rutger": 49029, + "rutgers": 28934, + "ruth": 15798, + "ruth": 12029, + "ruther": 26676, + "rutherford": 31070, + "ruthless": 36063, + "rutland": 46024, + "ruto": 43702, + "ruz": 23275, + "rv": 17135, + "rv": 17951, + "rva": 24278, + "rw": 9085, + "rw": 22926, + "rwa": 47452, + "rwand": 31758, + "rwanda": 15427, + "rwby": 39698, + "rwc": 32321, + "rx": 41188, + "rx": 15945, + "ry": 1511, + "ry": 913, + "ryan": 8682, + "ryan": 4053, + "ryanair": 43526, + "ryder": 43564, + "ryder": 21805, + "rye": 24015, + "rye": 17409, + "rying": 7838, + "ryn": 37728, + "ryo": 24460, + "rys": 21654, + "ryu": 46656, + "ryu": 34604, + "ré": 29106, + "s": 82, + "s": 338, + "sa": 774, + "sa": 1344, + "saa": 13429, + "saab": 27158, + "saad": 36530, + "saas": 25761, + "saat": 33151, + "sab": 3233, + "sab": 23213, + "saba": 38344, + "sabah": 32854, + "saban": 41620, + "sabar": 47102, + "sabbath": 26008, + "sabc": 30010, + "sabcnews": 41093, + "saber": 46822, + "saber": 25624, + "sabha": 23431, + "sabi": 47073, + "sabine": 44062, + "sable": 19224, + "sabot": 30700, + "sabotage": 40496, + "sabre": 35110, + "sabres": 29620, + "sabrin": 37029, + "sabrina": 24994, + "sac": 3632, + "sac": 12905, + "sach": 30168, + "sacha": 49010, + "sachin": 47527, + "sachin": 30297, + "sachs": 31451, + "sack": 28964, + "sack": 14979, + "sacked": 27519, + "sacks": 26441, + "sacram": 13334, + "sacramento": 16065, + "sacred": 40612, + "sacred": 12477, + "sacri": 15283, + "sacrif": 12117, + "sacrific": 16919, + "sacrifice": 12556, + "sacrificed": 31116, + "sacrifices": 28858, + "sacrificing": 48146, + "sad": 2810, + "sad": 3719, + "saddened": 27720, + "saddest": 34925, + "saddle": 30469, + "saddle": 20283, + "sade": 27429, + "sadh": 40955, + "sadi": 22207, + "sadie": 30333, + "sadiq": 44107, + "sadler": 45600, + "sadly": 11603, + "sadness": 20399, + "sae": 38633, + "sae": 34883, + "saeed": 29745, + "saf": 2125, + "saf": 25760, + "safar": 23443, + "safari": 14091, + "safarilive": 34816, + "safc": 27998, + "safe": 2901, + "safe": 2996, + "safeguard": 42249, + "safeguarding": 47451, + "safely": 11513, + "safer": 40124, + "safer": 15504, + "safest": 38973, + "safety": 19050, + "safety": 3406, + "safetyfirst": 43608, + "saffron": 27529, + "sag": 6609, + "sag": 30048, + "saga": 15758, + "sagan": 37193, + "sagar": 42518, + "sage": 25800, + "sage": 7509, + "sages": 25979, + "sagin": 47097, + "sagitt": 44685, + "sagu": 44708, + "sah": 30943, + "sah": 26342, + "saha": 36062, + "sahara": 24599, + "saharan": 44255, + "sahi": 24608, + "sahib": 34150, + "sai": 16048, + "sai": 10886, + "said": 40319, + "said": 1946, + "saif": 44164, + "saig": 36328, + "saigon": 41081, + "sail": 7528, + "sail": 12156, + "sailed": 43047, + "sailing": 11003, + "sailor": 28002, + "sailor": 16076, + "sailormoon": 40673, + "sailors": 25355, + "sails": 27526, + "sain": 21226, + "sain": 40378, + "sains": 24860, + "sainsbury": 45879, + "sainsburys": 36934, + "saint": 11274, + "saint": 5599, + "saints": 8769, + "saintsfc": 31102, + "sair": 46600, + "sair": 30971, + "saire": 28087, + "saison": 33256, + "sait": 48008, 
+ "saj": 33580, + "sak": 11511, + "sak": 35900, + "saka": 33609, + "sake": 12874, + "sakh": 43945, + "saki": 40514, + "saku": 37550, + "sakura": 24162, + "sal": 980, + "sal": 6126, + "sala": 17300, + "salaam": 46773, + "salad": 6188, + "salads": 30948, + "salah": 22516, + "salam": 19007, + "salam": 33963, + "salamat": 44696, + "salami": 46885, + "salaries": 33132, + "salary": 16312, + "salazar": 45988, + "sale": 17786, + "sale": 1690, + "saleh": 38353, + "salem": 48194, + "salem": 16884, + "sales": 13347, + "sales": 3765, + "salesforce": 22680, + "salesman": 37633, + "salford": 25629, + "sali": 15411, + "salim": 42760, + "salinas": 41990, + "saline": 46918, + "salis": 20667, + "salis": 39378, + "salisbury": 24763, + "sall": 27122, + "sall": 20883, + "salle": 23738, + "sally": 29542, + "sally": 13349, + "salman": 13754, + "salman": 16219, + "salmankhan": 15177, + "salmon": 37040, + "salmon": 9137, + "salom": 38268, + "salon": 33916, + "salon": 11105, + "saloon": 26038, + "sals": 16307, + "salsa": 16442, + "salt": 12763, + "salt": 6611, + "salted": 26313, + "saltlife": 47809, + "salts": 40559, + "saltwater": 43616, + "salty": 20678, + "salu": 31711, + "salud": 46867, + "salut": 44998, + "salute": 44908, + "salute": 9747, + "salutes": 32762, + "salv": 8299, + "salvador": 20874, + "salvage": 33131, + "salvation": 19534, + "salvatore": 38772, + "salz": 33594, + "salzburg": 43396, + "sam": 1644, + "sam": 3730, + "sama": 19272, + "samanth": 11465, + "samantha": 15466, + "samanthap": 38266, + "samanthaprabhu": 38643, + "samar": 21820, + "samaritan": 45495, + "samba": 37190, + "same": 23062, + "same": 2208, + "samheughan": 36255, + "sami": 48400, + "sami": 24322, + "sammy": 31091, + "sammy": 16758, + "samo": 30006, + "samoa": 34932, + "samp": 31225, + "sample": 9542, + "sampler": 40629, + "samples": 13387, + "sampling": 19522, + "sampson": 39983, + "sams": 44667, + "samson": 34659, + "samsun": 47875, + "samsung": 35369, + "samsung": 8115, + "samu": 7646, + "samuel": 30612, + "samuel": 12787, + "samurai": 21739, + "san": 1489, + "san": 2223, + "sana": 19434, + "sanantonio": 34714, + "sanat": 29091, + "sanatomy": 36052, + "sanc": 7398, + "sance": 15930, + "sanchez": 13971, + "sanctioned": 43032, + "sanctions": 17790, + "sanctu": 12712, + "sanctuary": 14044, + "sand": 2147, + "sand": 5094, + "sandal": 36445, + "sandal": 42185, + "sandals": 20731, + "sandalwood": 47502, + "sandeep": 46973, + "sander": 34111, + "sanders": 10429, + "sanderson": 36198, + "sandi": 44249, + "sandiego": 45997, + "sandiego": 15793, + "sandman": 45730, + "sando": 35921, + "sandoval": 44157, + "sandra": 33733, + "sandra": 13415, + "sandro": 42389, + "sands": 5936, + "sandstone": 36796, + "sandwich": 17050, + "sandwich": 8687, + "sandwiches": 19667, + "sandy": 29679, + "sandy": 10355, + "sane": 23419, + "sanford": 32330, + "sanfrancisco": 20254, + "sang": 13235, + "sang": 11684, + "sange": 12466, + "sangria": 42665, + "sani": 39137, + "sani": 34492, + "sanitary": 33842, + "sanitation": 25414, + "saniti": 43987, + "sanity": 30517, + "sanjay": 31712, + "sanjay": 25796, + "sanje": 40405, + "sanjose": 45971, + "sank": 43692, + "sano": 34053, + "sans": 16982, + "sansk": 39689, + "sanskrit": 48083, + "sant": 8356, + "sant": 23120, + "santa": 22175, + "santa": 4555, + "santac": 28876, + "santam": 45627, + "santana": 27033, + "santander": 46476, + "santi": 13856, + "santiago": 16568, + "santo": 29631, + "santo": 18400, + "santor": 28448, + "santorini": 39573, + "santos": 16582, + "sany": 47679, + "sao": 28026, + "sap": 8089, + "sap": 
11591, + "sapi": 40016, + "sapp": 13427, + "sapp": 40729, + "sapphire": 22044, + "sar": 1808, + "sar": 9424, + "sara": 37196, + "sara": 10063, + "sarab": 40716, + "sarac": 35722, + "sarah": 9086, + "sarah": 5327, + "saraj": 42592, + "sarajevo": 48211, + "saras": 20373, + "sarasota": 31990, + "sarato": 24845, + "saratoga": 29496, + "sarawak": 47331, + "sarcasm": 37246, + "sarcastic": 48639, + "sardar": 41786, + "sarde": 43925, + "sardin": 27383, + "sardinia": 41025, + "sare": 13051, + "saree": 30860, + "sargent": 34864, + "sari": 42327, + "sari": 20261, + "saries": 47586, + "sarkar": 30673, + "sarko": 33658, + "sarkodie": 42848, + "sarmy": 20954, + "sart": 33006, + "sary": 15398, + "sas": 3960, + "sas": 5235, + "sash": 35656, + "sasha": 46078, + "sasha": 20894, + "sasia": 44751, + "sask": 47091, + "sask": 30416, + "saskat": 17102, + "saskatchewan": 23899, + "saskatoon": 31128, + "sass": 31351, + "sassy": 20827, + "sat": 1382, + "sat": 3279, + "sata": 41520, + "satan": 19446, + "satanic": 38224, + "satchel": 45908, + "sate": 35749, + "satell": 9031, + "satellite": 10316, + "satellites": 28483, + "sath": 29675, + "sathletics": 30154, + "sati": 7038, + "satin": 21803, + "sation": 23674, + "sations": 31232, + "satire": 29875, + "satis": 9906, + "satisf": 22941, + "satisfaction": 19925, + "satisfied": 18101, + "satisfy": 29444, + "satisfying": 23755, + "sato": 34376, + "satu": 45283, + "satur": 1634, + "saturated": 32466, + "saturday": 12537, + "saturday": 1748, + "saturdaymorning": 29053, + "saturdaymotivation": 40843, + "saturdays": 18930, + "saturn": 17312, + "saty": 39426, + "sau": 2096, + "sau": 19455, + "sauce": 5520, + "saucer": 42272, + "sauces": 40367, + "saucy": 46684, + "saudi": 24511, + "saudi": 8548, + "saudiarabia": 28680, + "sauer": 46333, + "saul": 47623, + "saul": 23252, + "sault": 40361, + "sauna": 35460, + "saunders": 23794, + "saur": 13227, + "saura": 46532, + "saurus": 22118, + "saus": 36121, + "sausage": 11855, + "sausages": 31593, + "sauté": 36290, + "sautéed": 38517, + "sauvi": 30116, + "sauvignon": 32745, + "sav": 2248, + "sav": 26533, + "sava": 40198, + "savag": 43039, + "savage": 11859, + "savannah": 18662, + "save": 5895, + "save": 2673, + "saved": 7137, + "saveour": 33390, + "saver": 20987, + "savers": 31416, + "saves": 12907, + "savethe": 18031, + "savi": 14721, + "saving": 28498, + "saving": 6979, + "savings": 10651, + "savior": 24762, + "saviour": 35800, + "savor": 48071, + "savory": 32992, + "savoury": 49071, + "savoy": 39552, + "savvy": 29278, + "saw": 12429, + "saw": 2425, + "sawa": 39613, + "sawards": 29012, + "sawyer": 27726, + "sax": 14169, + "sax": 23766, + "saxon": 31856, + "saxophon": 43760, + "saxophone": 32296, + "say": 3047, + "say": 1451, + "saya": 35170, + "sayang": 46322, + "sayers": 44116, + "sayin": 23662, + "saying": 4455, + "says": 1563, + "saz": 35577, + "sb": 5576, + "sb": 4977, + "sba": 44970, + "sback": 43840, + "sband": 27539, + "sbaseball": 46491, + "sbball": 39190, + "sbc": 31404, + "sberg": 20358, + "sbi": 41369, + "sbk": 39211, + "sboro": 18909, + "sbridge": 49228, + "sbs": 18883, + "sbu": 48075, + "sbu": 46281, + "sburg": 7390, + "sburgh": 48205, + "sbury": 14081, + "sby": 26519, + "sby": 10287, + "sc": 663, + "sc": 3219, + "sca": 11001, + "scab": 31716, + "scaf": 28981, + "scafe": 45574, + "scaffolding": 41687, + "scal": 10859, + "scala": 37997, + "scalable": 44084, + "scale": 37817, + "scale": 5879, + "scaled": 41923, + "scales": 22891, + "scaling": 29116, + "scallo": 19936, + "scallop": 39544, + "scallops": 31430, + "scalp": 38898, 
+ "scam": 17620, + "scam": 13215, + "scamp": 28451, + "scams": 34395, + "scan": 10650, + "scan": 11261, + "scanada": 27121, + "scand": 8110, + "scandal": 35420, + "scandal": 11622, + "scandals": 45490, + "scandin": 32014, + "scandinavian": 35661, + "scanned": 43719, + "scanner": 24185, + "scanning": 24092, + "scans": 31251, + "scap": 35883, + "scape": 36005, + "scape": 12314, + "scapes": 31933, + "scar": 4171, + "scar": 18088, + "scarborough": 24254, + "scarce": 38572, + "scarcity": 45812, + "scare": 33536, + "scare": 15920, + "scarec": 38814, + "scarecrow": 46504, + "scared": 9870, + "scares": 34096, + "scarf": 13365, + "scari": 27050, + "scariest": 37213, + "scarlet": 20389, + "scarlett": 28325, + "scars": 20747, + "scarves": 29249, + "scary": 9250, + "scat": 13899, + "scattered": 22090, + "scavenger": 36778, + "scc": 19458, + "scd": 48422, + "scen": 2204, + "scenario": 20456, + "scenarios": 31346, + "scence": 33418, + "scene": 3562, + "scenery": 16025, + "scenes": 5415, + "scenic": 15394, + "scent": 36277, + "scent": 7683, + "scented": 27190, + "scenter": 23059, + "scentre": 39371, + "scents": 26336, + "scep": 24439, + "scfc": 38578, + "sch": 844, + "sch": 7542, + "scha": 42809, + "schaf": 45588, + "schaft": 41010, + "schal": 35568, + "schalke": 41029, + "schallenge": 43665, + "schan": 31328, + "schar": 15085, + "schat": 31842, + "schau": 35830, + "sche": 3038, + "sche": 7289, + "schedu": 4207, + "schedule": 5521, + "scheduled": 10986, + "schedules": 28986, + "scheduling": 32216, + "scheer": 26776, + "schel": 39881, + "schel": 38569, + "schem": 17720, + "scheme": 9024, + "schemes": 22958, + "schen": 22738, + "scher": 21925, + "scher": 21299, + "schi": 13731, + "schi": 24984, + "schicago": 46230, + "schiff": 39431, + "schild": 32148, + "schiz": 33230, + "schizoph": 40004, + "schizophre": 41163, + "schle": 32022, + "schmid": 17375, + "schmidt": 18463, + "schnau": 45745, + "schnei": 19941, + "schneider": 22972, + "schnit": 40903, + "scho": 2493, + "schoice": 23860, + "schol": 4498, + "scholar": 7192, + "scholar": 12830, + "scholarly": 41065, + "scholars": 13818, + "scholarship": 9070, + "scholarships": 17866, + "scholastic": 35743, + "schoo": 20721, + "school": 6063, + "school": 1228, + "schooled": 44722, + "schoolers": 31455, + "schooling": 28608, + "schools": 3513, + "schre": 47685, + "schri": 25453, + "schro": 32381, + "schu": 11318, + "schubert": 46939, + "schul": 14945, + "schultz": 30308, + "schulz": 39572, + "schumacher": 39208, + "schumer": 25313, + "schur": 42475, + "schwab": 47602, + "schwar": 13985, + "schwartz": 30617, + "schwarz": 27074, + "schwarzenegger": 33860, + "schwe": 25324, + "sci": 2267, + "sci": 8309, + "sciart": 31704, + "scicom": 28606, + "scicomm": 29573, + "scien": 39261, + "science": 10201, + "science": 2497, + "sciencefiction": 39170, + "sciences": 11481, + "scienti": 4338, + "scientific": 9750, + "scientist": 11083, + "scientists": 8045, + "sciento": 36193, + "scientology": 44694, + "scifi": 41862, + "scifi": 12230, + "scion": 47208, + "sciss": 25667, + "scissors": 30867, + "sciutto": 44392, + "sclerosis": 39446, + "sclub": 20017, + "sco": 1065, + "sco": 4763, + "scoe": 31164, + "scol": 13599, + "scoll": 44895, + "scollege": 39536, + "scom": 26407, + "scon": 17163, + "scon": 29272, + "scones": 36443, + "sconf": 39704, + "scoo": 14199, + "scooby": 34469, + "scoop": 13829, + "scoops": 41360, + "scope": 7979, + "scopes": 30328, + "scopic": 23869, + "scopy": 20018, + "scor": 8442, + "score": 12067, + "score": 4431, + "scoreboard": 30104, + "scorecard": 38128, + 
"scored": 6143, + "scoreless": 33469, + "scorer": 16572, + "scorers": 26699, + "scores": 7039, + "scoring": 9198, + "scorpi": 15445, + "scorpio": 34331, + "scorpion": 28461, + "scorpions": 45401, + "scorsese": 45975, + "scot": 2496, + "scot": 9271, + "scotch": 16687, + "scoti": 46446, + "scotia": 27859, + "scotland": 29174, + "scotland": 4203, + "scots": 17260, + "scotsman": 39612, + "scott": 7775, + "scott": 3664, + "scotti": 6227, + "scottish": 18039, + "scottish": 7442, + "scottsdale": 27817, + "scotty": 39697, + "scotty": 26836, + "scotus": 21720, + "scou": 44909, + "scoun": 16110, + "scouncil": 48787, + "scountry": 40432, + "scour": 46172, + "scout": 32213, + "scout": 10786, + "scouting": 19072, + "scouts": 14837, + "scow": 27929, + "scowboys": 31386, + "scp": 45030, + "scr": 36131, + "scra": 11187, + "scrabble": 39488, + "scram": 17289, + "scramble": 32688, + "scrambled": 39026, + "scran": 41774, + "scranton": 45274, + "scrap": 27950, + "scrap": 21695, + "scrapbook": 48733, + "scrapped": 43325, + "scraps": 40809, + "scrat": 9572, + "scratch": 13258, + "scratched": 48831, + "scratches": 46556, + "scratching": 44617, + "scre": 1795, + "scream": 31645, + "scream": 13239, + "screamed": 35427, + "screaming": 12891, + "screams": 23989, + "screen": 5351, + "screen": 3750, + "screened": 31450, + "screening": 6688, + "screenings": 27655, + "screenplay": 30058, + "screens": 12689, + "screenshot": 20637, + "screenshot": 12646, + "screenshots": 26783, + "screenshotsaturday": 21406, + "screenwriter": 37293, + "screenwriting": 35465, + "screw": 25529, + "screw": 14225, + "screwdriver": 48748, + "screwed": 30592, + "screws": 38292, + "scri": 2139, + "scrib": 34259, + "scribe": 36228, + "scribed": 38334, + "scricket": 45947, + "scrim": 21978, + "scrimmage": 25216, + "scrip": 11955, + "script": 8374, + "scripted": 40513, + "scription": 26604, + "scriptions": 39512, + "scripts": 20109, + "scripture": 27186, + "scro": 30768, + "scroll": 24160, + "scrolling": 28889, + "scrolls": 38113, + "scroo": 42263, + "scru": 7589, + "scrub": 23432, + "scrubs": 37919, + "scrum": 29047, + "scrump": 39791, + "scrumptious": 40987, + "scrutiny": 34305, + "scs": 26853, + "sct": 39284, + "scu": 8181, + "scu": 32135, + "scuba": 39053, + "scuba": 20559, + "scubadiving": 49046, + "scue": 25955, + "scul": 4948, + "scully": 36598, + "sculp": 6093, + "sculpt": 45044, + "sculpted": 41296, + "sculpting": 44389, + "sculptor": 29409, + "sculpture": 8757, + "sculptures": 20378, + "scum": 29655, + "scumb": 44525, + "scup": 21506, + "scur": 32742, + "scwx": 41966, + "scy": 27471, + "sd": 3080, + "sd": 4159, + "sda": 25548, + "sdale": 12327, + "sday": 5902, + "sday": 1376, + "sdays": 14491, + "sdc": 40992, + "sdcc": 13246, + "sden": 17241, + "sdf": 34681, + "sdg": 20177, + "sdgs": 16261, + "sdk": 40015, + "sdlive": 34561, + "sdn": 41925, + "sdsu": 41284, + "se": 567, + "se": 611, + "sea": 5970, + "sea": 2102, + "seab": 15728, + "seabir": 42558, + "seac": 35626, + "seaf": 9336, + "seafood": 12472, + "seag": 15730, + "seagu": 38076, + "seagull": 38858, + "seagulls": 42215, + "seahawks": 15341, + "seal": 21381, + "seal": 10159, + "sealed": 13358, + "sealing": 42992, + "seals": 18179, + "seam": 13710, + "seam": 44201, + "seaman": 47513, + "seamless": 29373, + "seamus": 40175, + "sean": 11406, + "sean": 6077, + "seanhannity": 43316, + "seap": 29983, + "seaport": 46418, + "sear": 1612, + "search": 23129, + "search": 1920, + "searched": 28961, + "searches": 26378, + "searching": 10626, + "seared": 29727, + "sears": 26693, + "seas": 7329, + 
"seas": 9556, + "seascape": 42593, + "seaside": 18867, + "season": 19288, + "season": 1367, + "seasonal": 14215, + "seasoned": 28399, + "seasoning": 43439, + "seasons": 8635, + "seat": 19670, + "seat": 4922, + "seated": 23953, + "seater": 37543, + "seating": 16240, + "seats": 6944, + "seattle": 24388, + "seattle": 6274, + "seau": 32263, + "seaw": 32658, + "seaweed": 30204, + "seaworld": 27422, + "seb": 35766, + "seb": 25171, + "sebasti": 10324, + "sebastian": 43792, + "sebastian": 13181, + "sebring": 41086, + "sec": 2875, + "sec": 5338, + "seca": 37847, + "secco": 27394, + "sece": 46297, + "seclu": 42392, + "secon": 1846, + "second": 9329, + "second": 2241, + "secondary": 13107, + "seconds": 6541, + "secre": 2460, + "secret": 20710, + "secret": 4145, + "secretari": 29515, + "secretariat": 31767, + "secretary": 6552, + "secretly": 21400, + "secrets": 9735, + "secs": 28665, + "sect": 15772, + "section": 34986, + "section": 4853, + "sectional": 21876, + "sections": 20061, + "sector": 6579, + "sectors": 22173, + "secu": 4894, + "secular": 47483, + "secular": 27560, + "secur": 2557, + "secure": 44763, + "secure": 7515, + "secured": 16848, + "secures": 31567, + "securing": 24759, + "securities": 25080, + "security": 31245, + "security": 2741, + "sed": 14034, + "sed": 1252, + "sedan": 24237, + "sedg": 46926, + "sedge": 45288, + "sedi": 29269, + "sedly": 31771, + "sedona": 46862, + "seduc": 19933, + "seductive": 43721, + "see": 1751, + "see": 862, + "seed": 14064, + "seed": 6488, + "seeded": 33688, + "seeding": 40050, + "seedlings": 47933, + "seeds": 9128, + "seeing": 3214, + "seek": 8839, + "seeker": 28011, + "seekers": 20732, + "seeking": 8592, + "seeks": 12594, + "seem": 20043, + "seem": 7523, + "seemed": 17240, + "seemingly": 25917, + "seems": 4453, + "seen": 36273, + "seen": 2041, + "seer": 32486, + "sees": 7594, + "seeyou": 41279, + "sef": 27453, + "seg": 10551, + "sega": 16122, + "segment": 15615, + "segments": 43053, + "segreg": 49117, + "segregation": 39086, + "segu": 33156, + "segun": 43087, + "seh": 27536, + "seh": 41430, + "sehun": 17705, + "sei": 13130, + "sei": 15907, + "sein": 24669, + "seine": 41378, + "seinfeld": 33706, + "seis": 25559, + "seismic": 38459, + "seiz": 22171, + "seize": 26624, + "seized": 15826, + "seizure": 36804, + "seizures": 47199, + "sek": 45515, + "sek": 25880, + "sel": 1000, + "sel": 4098, + "sela": 47006, + "selamat": 37692, + "selangor": 44402, + "selby": 43546, + "selca": 38606, + "selcaday": 35924, + "seldom": 48322, + "sele": 29137, + "selec": 3014, + "select": 8690, + "selected": 6881, + "selecting": 32696, + "selection": 6724, + "selections": 24099, + "selective": 28686, + "selects": 32902, + "selen": 19970, + "selena": 14677, + "selenagomez": 27653, + "seley": 30556, + "self": 10139, + "self": 1322, + "selfcare": 39560, + "selfi": 3007, + "selfie": 26735, + "selfie": 3666, + "selfies": 46058, + "selfies": 10050, + "selfish": 26907, + "selfless": 34236, + "sell": 10279, + "sell": 5119, + "seller": 11779, + "sellers": 16562, + "selling": 4396, + "sells": 14306, + "selma": 36652, + "sels": 42070, + "selves": 4505, + "sely": 8402, + "sem": 8645, + "sem": 17106, + "sema": 31816, + "seman": 29119, + "seman": 28378, + "semana": 41780, + "semb": 36054, + "seme": 10855, + "sement": 10714, + "sements": 31449, + "semester": 11905, + "semi": 11023, + "semi": 6684, + "semic": 26967, + "semicon": 34315, + "semiconduc": 35646, + "semiconductor": 43551, + "semifinal": 22935, + "semifinals": 21863, + "semin": 5595, + "seminar": 7269, + "seminars": 34870, + "seminary": 
31655, + "seminole": 42956, + "semis": 24013, + "semit": 22628, + "semite": 23721, + "semitic": 34894, + "semitism": 25911, + "semper": 47391, + "sen": 1057, + "sen": 2249, + "sena": 21584, + "senate": 30703, + "senate": 6843, + "senator": 20871, + "senator": 8495, + "senators": 16889, + "send": 27684, + "send": 3625, + "sending": 6985, + "sends": 10817, + "sene": 25269, + "seneca": 33419, + "senegal": 28255, + "senew": 49313, + "seng": 43022, + "seng": 29971, + "senior": 19865, + "senior": 3415, + "seniors": 8138, + "senna": 36195, + "senpai": 46562, + "sens": 5218, + "sens": 22837, + "sensation": 19383, + "sensational": 23051, + "sense": 29162, + "sense": 4747, + "sensei": 36158, + "senses": 21809, + "sensi": 38802, + "sensible": 30635, + "sensing": 29236, + "sensiti": 20531, + "sensitive": 13734, + "sensitivity": 27788, + "sensor": 15330, + "sensors": 20356, + "sensory": 21831, + "sensu": 28157, + "sensual": 40860, + "sent": 6200, + "sent": 3676, + "sentence": 12737, + "sentenced": 17773, + "sentences": 25858, + "sentencing": 34394, + "senti": 19042, + "sentim": 25102, + "sentiment": 25949, + "sentimental": 40070, + "sentiments": 47450, + "sentin": 20042, + "sentinel": 23123, + "senting": 3924, + "seo": 24743, + "seo": 8622, + "seok": 34697, + "seok": 22482, + "seokjin": 45584, + "seoul": 13253, + "sep": 3212, + "sep": 10434, + "separ": 6859, + "separate": 13886, + "separated": 22163, + "separately": 41904, + "separates": 45365, + "separati": 39377, + "separating": 43480, + "separation": 22007, + "sephora": 38414, + "sepsis": 40205, + "sept": 5380, + "septe": 3672, + "september": 3707, + "septic": 34690, + "sepul": 47360, + "seq": 44379, + "sequ": 5491, + "seque": 44662, + "sequel": 15701, + "sequence": 18833, + "sequences": 47306, + "sequencing": 33484, + "sequo": 32781, + "sequoia": 42404, + "ser": 803, + "ser": 2771, + "sera": 28250, + "serbia": 19038, + "serbian": 33687, + "sere": 35770, + "seren": 7880, + "serena": 19519, + "serenawilliams": 48316, + "serendip": 45805, + "serendipity": 49386, + "serene": 28269, + "serenity": 24187, + "serge": 13477, + "serge": 35700, + "sergeant": 22049, + "sergei": 39870, + "sergey": 35390, + "sergi": 47675, + "sergio": 18359, + "seri": 2763, + "seri": 37509, + "serial": 14216, + "serie": 19752, + "seriea": 32660, + "series": 1857, + "serious": 47421, + "serious": 4770, + "seriously": 4885, + "sermon": 24884, + "sero": 48883, + "serpent": 37084, + "serpent": 35364, + "serra": 39851, + "serrano": 44236, + "sers": 13509, + "serum": 25385, + "serv": 1297, + "serv": 24571, + "servant": 20810, + "servants": 29652, + "serve": 39202, + "serve": 2838, + "served": 4740, + "server": 36458, + "server": 8398, + "serverless": 49243, + "servers": 22262, + "serves": 9915, + "servic": 27115, + "service": 21496, + "service": 2086, + "serviced": 44687, + "services": 3100, + "servicing": 41300, + "serving": 5722, + "sery": 14279, + "ses": 23708, + "ses": 1386, + "sesame": 21706, + "sese": 37128, + "sesh": 24274, + "session": 2550, + "sessions": 6327, + "set": 7965, + "set": 1167, + "setback": 43605, + "seth": 20005, + "seth": 11870, + "sethu": 38933, + "setlist": 33141, + "seton": 43799, + "sets": 4650, + "sett": 4984, + "sett": 17567, + "sette": 14613, + "setter": 23153, + "settes": 44145, + "setti": 45170, + "setting": 5264, + "settings": 18628, + "settle": 15075, + "settled": 18310, + "settlement": 16494, + "settlements": 36605, + "settlers": 35671, + "settles": 41498, + "settling": 22036, + "setup": 11092, + "seu": 31539, + "seul": 48975, + "seum": 18838, + 
"seun": 24209, + "seung": 32393, + "seung": 33711, + "seungri": 41627, + "seuss": 34441, + "sev": 26585, + "sev": 37600, + "seva": 42604, + "seve": 21458, + "seve": 22468, + "sevel": 17439, + "seven": 7874, + "seven": 5757, + "sevens": 29911, + "sevent": 43048, + "seventeen": 19337, + "seventh": 17568, + "seventy": 47170, + "sever": 3250, + "sever": 45557, + "several": 5560, + "severance": 26194, + "severe": 6215, + "severely": 24417, + "severn": 34626, + "severy": 34207, + "sevilla": 24947, + "seville": 34988, + "sew": 28640, + "sewage": 32777, + "sewer": 28294, + "sewing": 15974, + "sewn": 42118, + "sex": 3548, + "sex": 5937, + "sexi": 20562, + "sexiest": 25426, + "sexism": 32059, + "sexist": 33047, + "sexu": 14741, + "sexual": 6749, + "sexuality": 21244, + "sexually": 23032, + "sexy": 21019, + "sexy": 38127, + "sey": 6317, + "sey": 2258, + "seychel": 36809, + "seychelles": 38519, + "seye": 35604, + "seym": 22657, + "seymour": 25850, + "seys": 15081, + "sez": 42377, + "señ": 43368, + "sf": 4435, + "sf": 4915, + "sfa": 32675, + "sfam": 37649, + "sfb": 27930, + "sfc": 14129, + "sfest": 49024, + "sff": 42056, + "sfgiants": 20923, + "sfield": 11801, + "sfo": 39182, + "sfootball": 45259, + "sfor": 9115, + "sford": 28917, + "sforsale": 28888, + "sfw": 18073, + "sfx": 37995, + "sg": 9599, + "sg": 7611, + "sga": 33049, + "sgate": 27558, + "sgh": 47590, + "sgo": 5393, + "sgo": 21044, + "sgt": 13748, + "sh": 552, + "sh": 849, + "sha": 1514, + "sha": 3337, + "shaa": 44221, + "shab": 8323, + "shabbat": 38042, + "shabby": 28838, + "shack": 23866, + "shack": 18785, + "shad": 3182, + "shad": 23874, + "shade": 34554, + "shade": 10097, + "shaded": 43506, + "shades": 46608, + "shades": 9270, + "shadesof": 45180, + "shading": 37348, + "shado": 9325, + "shadow": 15243, + "shadow": 7068, + "shadowhun": 19931, + "shadowhunters": 24834, + "shadowing": 46092, + "shadows": 12971, + "shady": 22158, + "shaf": 12032, + "shaft": 21545, + "shag": 22439, + "shaggy": 42662, + "shah": 13203, + "shah": 8439, + "shahe": 23643, + "shaheed": 30060, + "shaheer": 43969, + "shahi": 46972, + "shahid": 25696, + "shahid": 27138, + "shahidkapoor": 29892, + "shahzad": 45915, + "shai": 47941, + "shaikh": 45712, + "shail": 37603, + "shair": 43135, + "shak": 8385, + "shake": 8206, + "shake": 8251, + "shaken": 38237, + "shaker": 26210, + "shakers": 38411, + "shakes": 19668, + "shakespe": 9890, + "shakespeare": 22499, + "shakespeare": 12488, + "shakespearesunday": 32320, + "shaking": 19101, + "shakira": 40795, + "shakti": 48593, + "shakti": 32458, + "shakur": 48915, + "shal": 15056, + "shal": 28175, + "shale": 32864, + "shall": 4742, + "shallow": 23730, + "shalom": 31339, + "sham": 6453, + "sham": 9005, + "shaman": 48727, + "shambles": 40799, + "shame": 14776, + "shame": 7593, + "shameful": 28283, + "shameless": 25380, + "shaming": 40553, + "shampoo": 23944, + "shamrock": 34199, + "shan": 5171, + "shan": 8834, + "shana": 44835, + "shand": 29101, + "shane": 26863, + "shane": 11572, + "shang": 11141, + "shanghai": 12742, + "shani": 46665, + "shank": 24685, + "shankar": 24108, + "shann": 9932, + "shannon": 22842, + "shannon": 13581, + "shant": 36610, + "shap": 5581, + "shape": 26925, + "shape": 6448, + "shaped": 10127, + "shapes": 15377, + "shaping": 18632, + "shapiro": 32110, + "shaq": 46402, + "shaq": 26843, + "shar": 1669, + "shar": 36542, + "shara": 48849, + "sharapo": 36489, + "sharapova": 36671, + "shard": 42207, + "share": 7585, + "share": 1978, + "shared": 5368, + "shareholder": 38241, + "shareholders": 34778, + "sharepoint": 
39213, + "shares": 4974, + "sharethe": 49277, + "shareyour": 45890, + "shari": 27738, + "shari": 47390, + "sharia": 37244, + "sharif": 15501, + "sharing": 3567, + "sharjah": 33420, + "shark": 15836, + "shark": 7980, + "sharks": 10047, + "sharkweek": 39571, + "sharma": 10105, + "sharon": 28722, + "sharon": 14138, + "sharp": 17126, + "sharp": 8157, + "sharpe": 34374, + "sharpen": 41465, + "sharpie": 46858, + "sharply": 37185, + "shasta": 46727, + "shat": 12169, + "shat": 44388, + "shatter": 45008, + "shattered": 26820, + "shau": 13750, + "shaun": 23446, + "shaun": 16669, + "shav": 11410, + "shave": 17735, + "shaved": 25571, + "shaving": 24261, + "shaw": 6122, + "shaw": 6805, + "shawa": 46413, + "shawl": 35132, + "shawn": 16677, + "shawn": 10970, + "shawnee": 48060, + "shawnmendes": 27277, + "shawty": 38026, + "shay": 10778, + "shay": 18361, + "shaykh": 47223, + "shaz": 18618, + "shazam": 29063, + "shc": 43419, + "shd": 37729, + "she": 1729, + "she": 1043, + "shea": 20407, + "shead": 44287, + "shead": 20434, + "shealth": 41743, + "shealth": 22197, + "shear": 27974, + "shear": 32108, + "shearer": 40505, + "sheath": 45637, + "shed": 16586, + "shed": 1492, + "shedding": 33608, + "sheds": 25921, + "shee": 23450, + "shee": 34321, + "sheed": 26105, + "sheehan": 41809, + "sheen": 25025, + "sheep": 23604, + "sheep": 9629, + "sheer": 17577, + "sheeran": 18561, + "sheet": 7298, + "sheets": 12744, + "shef": 8237, + "sheff": 38844, + "sheff": 43821, + "sheffiel": 26940, + "sheffield": 41763, + "sheffield": 10420, + "sheffieldissuper": 33628, + "sheh": 31667, + "sheikh": 15031, + "sheil": 42765, + "sheila": 25734, + "shek": 33285, + "shel": 3159, + "shelby": 36906, + "shelby": 16885, + "sheldon": 25079, + "shelf": 10955, + "shell": 23374, + "shell": 6648, + "shelley": 22497, + "shelling": 43166, + "shells": 19265, + "shelly": 37461, + "shelter": 8599, + "sheltered": 48070, + "shelters": 24312, + "shelton": 24471, + "shelves": 16225, + "shem": 40299, + "shen": 10154, + "shen": 31098, + "shenan": 20965, + "shenando": 44666, + "shenanigans": 26590, + "shenko": 39751, + "shenmue": 48279, + "shenzhen": 38970, + "shep": 33757, + "shep": 44857, + "shepard": 26810, + "shepher": 11008, + "shepherd": 13242, + "shepherds": 42792, + "sheppard": 37304, + "sher": 3570, + "sher": 4510, + "sheraton": 39400, + "shere": 21507, + "sheri": 9235, + "sheridan": 27085, + "sheriff": 10309, + "sherlock": 17294, + "sherman": 17822, + "sherry": 44348, + "sherry": 24689, + "shers": 14141, + "sherwood": 24527, + "sheryl": 39773, + "shes": 45514, + "shes": 2502, + "shet": 15850, + "shetland": 29595, + "shetty": 25533, + "shev": 45182, + "sheva": 45132, + "shh": 35025, + "shhh": 36932, + "shi": 823, + "shi": 3533, + "shia": 23791, + "shibu": 36177, + "shibuya": 41623, + "shie": 26638, + "shiel": 33413, + "shield": 8670, + "shields": 19085, + "shies": 35312, + "shif": 35317, + "shift": 43767, + "shift": 6905, + "shifted": 34429, + "shifter": 48944, + "shifting": 21992, + "shifts": 23957, + "shik": 36980, + "shil": 14370, + "shill": 32121, + "shill": 30090, + "shilpa": 47062, + "shilpa": 40690, + "shim": 11986, + "shim": 32780, + "shima": 14382, + "shimano": 48904, + "shimi": 40517, + "shimmer": 38792, + "shin": 5664, + "shin": 11784, + "shinde": 41516, + "shine": 17582, + "shine": 3780, + "shinee": 19660, + "shines": 16015, + "shing": 38641, + "shing": 1743, + "shining": 10485, + "shino": 43074, + "shiny": 12190, + "ship": 7645, + "ship": 1158, + "shipment": 28553, + "shipp": 34709, + "shipped": 15279, + "shippers": 44789, + "shipping": 
5721, + "ships": 3262, + "shipwreck": 48878, + "shipy": 26828, + "shipyard": 31273, + "shir": 1956, + "shiraz": 35618, + "shire": 11975, + "shire": 2968, + "shirehour": 32456, + "shirley": 18189, + "shiro": 26048, + "shirt": 27576, + "shirt": 2523, + "shirtless": 28959, + "shirts": 5803, + "shistory": 34979, + "shiv": 18042, + "shiv": 37121, + "shiva": 33881, + "shiva": 21174, + "shka": 38944, + "shld": 49359, + "shma": 48074, + "shment": 8802, + "shments": 18822, + "sho": 719, + "sho": 13756, + "shock": 19617, + "shock": 8736, + "shocked": 15787, + "shocker": 37971, + "shockey": 22258, + "shocking": 13394, + "shocks": 31886, + "shoe": 16308, + "shoe": 7342, + "shoes": 49391, + "shoes": 4079, + "shol": 21472, + "sholm": 44139, + "shome": 42701, + "shon": 19526, + "shon": 37621, + "shone": 47173, + "shoo": 1975, + "shook": 20730, + "shoops": 29956, + "shoot": 12531, + "shoot": 3704, + "shooter": 13645, + "shooters": 31902, + "shooting": 3992, + "shootings": 26753, + "shootout": 20666, + "shoots": 14144, + "shop": 5738, + "shop": 1557, + "shopify": 47949, + "shoplocal": 21775, + "shopp": 38486, + "shoppe": 38236, + "shopped": 28088, + "shopper": 24346, + "shoppers": 22316, + "shopping": 42101, + "shopping": 4266, + "shops": 6467, + "shopsmall": 35942, + "shor": 3209, + "shore": 14717, + "shore": 5928, + "shored": 33140, + "shoreditch": 35042, + "shoreline": 34807, + "shores": 18102, + "short": 6803, + "short": 3005, + "shortage": 19910, + "shortages": 38730, + "shortcuts": 45793, + "shorten": 41711, + "shorter": 20350, + "shortest": 33717, + "shortfilm": 37204, + "shorth": 37397, + "shortlist": 28163, + "shortlisted": 20631, + "shortly": 11967, + "shorts": 9680, + "shorty": 33502, + "shot": 9805, + "shot": 2000, + "shotel": 42365, + "shotgun": 21643, + "shots": 5342, + "shou": 3890, + "shoul": 29847, + "should": 14947, + "should": 1535, + "shoulder": 8476, + "shoulders": 18738, + "shouldn": 9416, + "shour": 20025, + "shouse": 28671, + "shout": 7335, + "shout": 5214, + "shouted": 44397, + "shouting": 26464, + "shoutout": 8274, + "shouts": 26709, + "shovel": 31778, + "show": 2133, + "show": 1080, + "showbiz": 34156, + "showcas": 14290, + "showcase": 7265, + "showcased": 35786, + "showcases": 26266, + "showcasing": 17036, + "showdown": 15576, + "showed": 7150, + "shower": 7777, + "showers": 9893, + "showing": 3649, + "shown": 8506, + "showroom": 16821, + "shows": 2665, + "showtime": 40576, + "showtime": 15442, + "showyour": 46733, + "shp": 38341, + "shq": 21145, + "shr": 10118, + "shra": 21360, + "shradd": 28172, + "shraddha": 35208, + "shraddhakapoor": 40385, + "shre": 12101, + "shred": 19756, + "shred": 33017, + "shredded": 31772, + "shredding": 45534, + "shree": 37410, + "shrek": 35009, + "shrews": 26411, + "shrewsbury": 30921, + "shri": 8838, + "shri": 11424, + "shrimp": 12727, + "shrin": 24865, + "shrine": 16156, + "shrink": 34957, + "shrinking": 41243, + "shrm": 44163, + "shro": 15259, + "shroff": 32081, + "shrop": 22630, + "shropshire": 26344, + "shru": 14911, + "shrub": 41464, + "shrubs": 47975, + "shrun": 46767, + "shs": 16184, + "sht": 44210, + "shti": 38927, + "shu": 2872, + "shu": 17651, + "shua": 33771, + "shub": 40552, + "shud": 45782, + "shuff": 42641, + "shuffle": 21681, + "shui": 45473, + "shuk": 29927, + "shukla": 46829, + "shul": 30721, + "shum": 37383, + "shun": 24479, + "shun": 39594, + "shur": 41032, + "shut": 8702, + "shut": 8282, + "shutdown": 16051, + "shutout": 24385, + "shuts": 28313, + "shutt": 31866, + "shutter": 36235, + "shutter": 33902, + "shutters": 46894, + 
"shutting": 31383, + "shuttle": 15842, + "shwar": 41640, + "shy": 22678, + "shy": 9682, + "si": 564, + "si": 2990, + "sia": 2357, + "siam": 29686, + "siam": 48248, + "siamese": 43161, + "sian": 28510, + "sian": 6221, + "sians": 26583, + "sias": 28645, + "siber": 22206, + "siberia": 39969, + "siberian": 34058, + "sibl": 14338, + "sible": 14507, + "sibling": 43060, + "sibling": 23779, + "siblings": 17156, + "sic": 8278, + "sic": 1118, + "sica": 34125, + "sical": 33875, + "sichuan": 48950, + "sicilian": 45292, + "sicily": 23179, + "sick": 11143, + "sick": 5359, + "sickest": 47972, + "sickle": 41459, + "sickness": 28898, + "sics": 26297, + "sid": 10117, + "sid": 15119, + "sidd": 19842, + "siddi": 35227, + "side": 5869, + "side": 1145, + "sided": 21061, + "sidekick": 44683, + "sidel": 43557, + "sideline": 32056, + "sidelines": 31046, + "sider": 30581, + "siders": 41249, + "sides": 7578, + "sideshow": 46789, + "sidewalk": 23278, + "sidewalks": 43583, + "sideways": 35593, + "siding": 38758, + "sidney": 22598, + "sie": 8533, + "sie": 5685, + "sieg": 49203, + "siege": 18460, + "siegel": 48559, + "siem": 18434, + "siemens": 30147, + "siempre": 44030, + "siena": 33336, + "sienna": 40373, + "sier": 10028, + "sier": 7444, + "sierra": 13552, + "siers": 35923, + "sies": 16367, + "siest": 18323, + "sif": 29300, + "sig": 872, + "sig": 19145, + "sigh": 36303, + "sigh": 15505, + "sighs": 44579, + "sight": 16897, + "sight": 6329, + "sighted": 33034, + "sighting": 17507, + "sightings": 30004, + "sights": 17364, + "sightseeing": 34210, + "sigma": 45075, + "sigma": 15697, + "sign": 5538, + "sign": 2292, + "signage": 21156, + "signal": 10781, + "signaling": 38492, + "signalling": 48426, + "signals": 17150, + "signation": 24347, + "signature": 9189, + "signatures": 21865, + "signed": 3163, + "signee": 39778, + "signi": 34023, + "signific": 6374, + "significance": 23769, + "significant": 8735, + "significantly": 16187, + "signing": 4401, + "signingday": 40282, + "signings": 27731, + "signs": 4659, + "signup": 40791, + "sigue": 49401, + "sii": 36672, + "sik": 19974, + "sik": 22413, + "sika": 31144, + "sikh": 21829, + "sikhs": 45426, + "sil": 1556, + "sil": 8315, + "sila": 41754, + "sile": 37620, + "silen": 39048, + "silence": 8462, + "silenced": 45415, + "silent": 30352, + "silent": 8487, + "silently": 42640, + "silhou": 20589, + "silhouette": 26149, + "silic": 23830, + "silicon": 32412, + "silicon": 17888, + "silicone": 28221, + "silk": 25891, + "silk": 9743, + "silky": 29554, + "sill": 42468, + "sill": 48024, + "silly": 11883, + "silon": 31841, + "sils": 39708, + "silva": 16489, + "silve": 37697, + "silver": 7525, + "silver": 3467, + "silverado": 46160, + "silverstone": 29666, + "silvia": 37289, + "sim": 5026, + "sim": 10740, + "sima": 35871, + "simba": 39492, + "simcoe": 47148, + "sime": 28329, + "simi": 38073, + "simil": 7202, + "similar": 8547, + "similarities": 34716, + "simm": 13001, + "simmons": 14699, + "simo": 37171, + "simon": 8796, + "simon": 6668, + "simona": 46277, + "simone": 19062, + "simons": 33097, + "simp": 2542, + "simple": 19018, + "simple": 4129, + "simpler": 35489, + "simplest": 39588, + "simpli": 16868, + "simplicity": 21262, + "simplified": 36647, + "simplify": 35479, + "simply": 25637, + "simply": 6151, + "simpson": 41805, + "simpson": 11750, + "simpsons": 21092, + "sims": 14021, + "simul": 9845, + "simulated": 46395, + "simulation": 18610, + "simulator": 20821, + "simultaneous": 48816, + "simultaneously": 28575, + "sin": 1303, + "sin": 3421, + "sina": 19541, + "sinai": 33226, + "sinatra": 
27262, + "sinc": 30464, + "since": 1855, + "sincere": 24513, + "sincere": 24886, + "sincerely": 25673, + "sinclair": 23100, + "sind": 39598, + "sind": 30877, + "sindh": 20754, + "sindia": 48038, + "sine": 22741, + "sine": 33793, + "sinfo": 47178, + "sing": 1387, + "sing": 1197, + "singapo": 27861, + "singapore": 28879, + "singapore": 6754, + "singer": 33880, + "singer": 5108, + "singers": 15613, + "singersongwriter": 44585, + "singh": 19445, + "singh": 5715, + "singing": 5864, + "single": 19524, + "single": 2688, + "singles": 12025, + "singleton": 46247, + "singly": 16619, + "sings": 13635, + "singul": 34003, + "singular": 44009, + "singularity": 48410, + "sinha": 29416, + "sini": 41781, + "sini": 26319, + "sinister": 31313, + "sink": 37232, + "sink": 14551, + "sinking": 27949, + "sinks": 32710, + "sinn": 36315, + "sinner": 45380, + "sinners": 43436, + "sino": 29759, + "sins": 9345, + "sinthe": 30737, + "sinu": 37351, + "sinus": 47535, + "sio": 10807, + "siob": 40954, + "siology": 46315, + "sion": 5676, + "sion": 1015, + "sional": 14533, + "sionally": 30754, + "sions": 4060, + "sioux": 44695, + "sioux": 24954, + "sip": 16096, + "sipping": 28527, + "sir": 10708, + "sir": 3846, + "sire": 28450, + "siren": 33026, + "sirens": 35907, + "siri": 13986, + "siri": 18394, + "sirius": 23574, + "sirius": 34999, + "siriusxm": 29833, + "sirloin": 46828, + "sis": 18132, + "sis": 2580, + "sisd": 27132, + "sisi": 37892, + "siss": 42929, + "sissy": 27564, + "sist": 20520, + "sista": 37448, + "sister": 17417, + "sister": 3677, + "sisterhood": 37313, + "sisters": 6404, + "sit": 7387, + "sit": 4037, + "sitcom": 30426, + "site": 26792, + "site": 1988, + "sites": 7236, + "sith": 41499, + "sito": 42613, + "sits": 12726, + "sitt": 42988, + "sitter": 40777, + "sittin": 40887, + "sitting": 4919, + "situ": 5562, + "situ": 42536, + "situated": 22030, + "situation": 7144, + "situations": 19096, + "sity": 38177, + "sity": 5477, + "siu": 40174, + "sium": 8090, + "sius": 27595, + "siva": 20991, + "sivan": 36931, + "sive": 23572, + "sive": 1875, + "sively": 10343, + "siveness": 39667, + "sives": 23896, + "sivity": 42738, + "siwon": 29055, + "six": 5968, + "six": 4093, + "sixers": 25941, + "sixteen": 28677, + "sixth": 12909, + "sixties": 44948, + "sixty": 32588, + "siya": 44440, + "size": 38377, + "size": 3235, + "sized": 9832, + "sizes": 10253, + "sizing": 28330, + "sizz": 23778, + "sizzle": 47890, + "sizzling": 35799, + "sj": 7536, + "sj": 16010, + "sjo": 42012, + "sk": 909, + "sk": 2058, + "ska": 7495, + "skag": 31948, + "skan": 46772, + "skar": 27587, + "skar": 26835, + "skate": 13740, + "skate": 12745, + "skateboard": 31777, + "skateboarding": 31352, + "skater": 30337, + "skaters": 39824, + "skates": 31479, + "skc": 44551, + "ske": 6261, + "ske": 25516, + "skel": 36564, + "skelet": 27075, + "skeletal": 37369, + "skeleton": 20062, + "skeletons": 48874, + "skell": 40801, + "skep": 27772, + "skeptical": 44934, + "sker": 37640, + "sker": 33600, + "sket": 3744, + "sketch": 11767, + "sketch": 5269, + "sketchbook": 18899, + "sketched": 38581, + "sketches": 17622, + "sketching": 23228, + "sketchy": 41582, + "skey": 37453, + "ski": 3327, + "ski": 3428, + "skid": 36574, + "skid": 32099, + "skier": 42585, + "skies": 7244, + "skiing": 14400, + "skil": 24543, + "skill": 15598, + "skill": 10604, + "skilled": 17535, + "skillet": 40568, + "skills": 4113, + "skim": 33191, + "skin": 5821, + "skin": 3575, + "skincare": 12648, + "skine": 37300, + "sking": 46215, + "skinned": 42199, + "skinner": 30261, + "skinny": 42729, + "skinny": 15457, 
+ "skins": 11594, + "skip": 39793, + "skip": 14296, + "skipped": 40639, + "skipper": 22226, + "skipping": 34867, + "skir": 8919, + "skirt": 12386, + "skirts": 24840, + "skis": 32843, + "skit": 43573, + "skitchen": 42820, + "skittles": 43213, + "sko": 15141, + "sko": 23493, + "skoda": 38668, + "skool": 26743, + "skril": 43149, + "skrillex": 43651, + "sks": 48136, + "sku": 10836, + "skul": 17561, + "skull": 34068, + "skull": 12092, + "skulls": 31804, + "skunk": 42194, + "sky": 3075, + "sky": 2390, + "skybet": 45540, + "skye": 21475, + "skyl": 43554, + "skylar": 45411, + "skyline": 14606, + "skymap": 41734, + "skynews": 40977, + "skype": 17069, + "skyrim": 33693, + "skysports": 39845, + "skysports": 46725, + "skywalker": 32936, + "sl": 2621, + "sl": 7489, + "sla": 2725, + "sla": 26707, + "slab": 24241, + "slabs": 42818, + "slack": 37108, + "slack": 30142, + "slade": 33546, + "slain": 35972, + "slalom": 43540, + "slam": 14891, + "slam": 10131, + "slammed": 29772, + "slams": 18907, + "slan": 44663, + "slan": 47193, + "sland": 11294, + "slang": 33655, + "slap": 48830, + "slap": 21751, + "slapped": 38861, + "slaps": 46796, + "slash": 19749, + "slat": 38966, + "slate": 17919, + "slated": 36094, + "slater": 25968, + "slaugh": 26782, + "slaughter": 19815, + "slaughtered": 46615, + "slav": 47292, + "slava": 41797, + "slave": 14029, + "slavery": 15754, + "slaves": 23833, + "slaw": 28178, + "slay": 48319, + "slay": 19380, + "slayed": 44870, + "slayer": 21605, + "slaying": 27812, + "slays": 45648, + "slc": 21972, + "sle": 1709, + "sleague": 23336, + "sled": 28438, + "sledge": 48750, + "slee": 17642, + "slee": 38977, + "sleek": 23187, + "sleep": 4656, + "sleep": 3840, + "sleeper": 28709, + "sleeping": 6982, + "sleepless": 39779, + "sleepover": 39415, + "sleeps": 16610, + "sleepy": 32572, + "sleepy": 14497, + "sleet": 36948, + "sleeve": 35270, + "sleeve": 10536, + "sleeveless": 38049, + "sleeves": 19691, + "sleg": 47650, + "sleigh": 30865, + "slender": 40331, + "slept": 20388, + "sler": 14066, + "sley": 17198, + "sley": 6496, + "sli": 1811, + "sli": 44824, + "slic": 19692, + "slice": 13431, + "sliced": 28121, + "slices": 28424, + "slick": 18341, + "slide": 27828, + "slide": 8837, + "slider": 37861, + "sliders": 40700, + "slides": 15939, + "slideshow": 42817, + "sliding": 21468, + "slife": 15448, + "sliga": 21080, + "slight": 14297, + "slightly": 8456, + "sligo": 30424, + "slike": 38744, + "slim": 35226, + "slim": 12364, + "slime": 29107, + "sling": 28021, + "sling": 32607, + "slinger": 47269, + "slions": 43363, + "slip": 39785, + "slip": 12105, + "slipknot": 41816, + "slipped": 30344, + "slipper": 39644, + "slippers": 26509, + "slippery": 30814, + "slipping": 36301, + "slips": 30632, + "slist": 33749, + "slit": 47011, + "slive": 31652, + "slo": 4303, + "slo": 36083, + "sloan": 29110, + "sloane": 41553, + "slogan": 23398, + "slogans": 42795, + "slope": 22769, + "slopes": 24066, + "sloppy": 36154, + "slot": 14500, + "sloth": 30007, + "slots": 19238, + "slou": 48493, + "slovak": 23315, + "slovakia": 25994, + "sloven": 17018, + "slovenia": 21037, + "slow": 6674, + "slow": 5444, + "slowdown": 38421, + "slowed": 43793, + "slower": 29181, + "slowing": 29839, + "slowly": 9568, + "slows": 46855, + "slp": 45599, + "slr": 21325, + "sls": 33651, + "slt": 39283, + "sltd": 36388, + "slu": 7224, + "slu": 47456, + "slug": 34190, + "slugger": 48671, + "slum": 46754, + "slumber": 44295, + "slump": 35588, + "slur": 30476, + "slush": 39815, + "slv": 45526, + "sly": 28145, + "sly": 21062, + "sm": 978, + "sm": 2764, + "sma": 
4357, + "sma": 11854, + "smack": 21280, + "smack": 30026, + "smackdown": 26138, + "smafia": 47686, + "smag": 32212, + "smal": 48379, + "small": 5244, + "small": 2442, + "smallbiz": 41724, + "smallbiz": 18987, + "smallbusiness": 21316, + "smalle": 18490, + "smaller": 12431, + "smallest": 18686, + "smalls": 41696, + "sman": 9612, + "smar": 3201, + "smart": 5383, + "smart": 4115, + "smartcities": 34822, + "smartcity": 33973, + "smarter": 18990, + "smartest": 37092, + "smarthome": 47726, + "smartphone": 11290, + "smartphones": 22212, + "smartwatch": 35798, + "smash": 17258, + "smash": 10332, + "smashbros": 44897, + "smashed": 18410, + "smashes": 45657, + "smashing": 19632, + "smatter": 16537, + "smb": 30446, + "smc": 31375, + "smc": 28312, + "smd": 34582, + "sme": 11758, + "sme": 15650, + "smear": 37546, + "smel": 28476, + "smell": 9688, + "smelling": 32493, + "smells": 14668, + "smelly": 46145, + "smen": 15961, + "smer": 48526, + "smere": 39629, + "smes": 26141, + "smg": 46876, + "smh": 9623, + "smi": 5655, + "smi": 40049, + "smil": 33937, + "smile": 27641, + "smile": 3490, + "smiled": 34362, + "smiles": 8726, + "smiley": 22925, + "smiling": 9200, + "smir": 24667, + "smith": 10527, + "smith": 2915, + "smiths": 27872, + "smithson": 25372, + "smithsonian": 31209, + "smm": 19510, + "smma": 42370, + "smo": 2513, + "smo": 13437, + "smobile": 38923, + "smog": 44425, + "smoke": 20381, + "smoke": 6664, + "smoked": 11161, + "smoker": 32348, + "smokers": 29571, + "smokes": 40336, + "smokey": 23670, + "smokin": 32825, + "smoking": 9038, + "smoky": 25549, + "smol": 29939, + "smol": 40403, + "smoo": 5430, + "smooth": 10958, + "smooth": 8990, + "smoother": 44271, + "smoothie": 16668, + "smoothies": 34458, + "smoothly": 32380, + "smore": 48323, + "smp": 32260, + "smriti": 49227, + "sms": 10409, + "smt": 26672, + "smtown": 26072, + "smu": 10878, + "smu": 30458, + "smug": 41021, + "smugg": 28130, + "smuggling": 34146, + "smur": 24708, + "smusic": 19191, + "smw": 44929, + "smx": 46699, + "smy": 14381, + "smyth": 44822, + "sn": 1672, + "sn": 5844, + "sna": 4032, + "snack": 47548, + "snack": 10039, + "snacking": 46474, + "snacks": 12349, + "snag": 34789, + "snag": 28043, + "snagged": 48534, + "snail": 23132, + "snails": 34928, + "snake": 30133, + "snake": 8798, + "snakes": 19605, + "snap": 4578, + "snap": 7404, + "snapback": 31234, + "snapchat": 7799, + "snapmatic": 45907, + "snapp": 10185, + "snapped": 15543, + "snapper": 31677, + "snapping": 31581, + "snaps": 16890, + "snapshot": 18243, + "snar": 30810, + "snare": 40651, + "snat": 18457, + "snatch": 35302, + "snatched": 44821, + "snation": 14362, + "snazzy": 48963, + "snc": 39918, + "sne": 3791, + "sne": 46503, + "sneak": 27871, + "sneak": 6917, + "sneaker": 31698, + "sneaker": 24781, + "sneakers": 17397, + "sneaking": 34633, + "sneakpeek": 47831, + "sneaks": 40926, + "sneaky": 21293, + "snee": 42095, + "snell": 46410, + "sner": 31424, + "snes": 26667, + "snews": 18623, + "snf": 47651, + "sng": 41549, + "snhl": 43093, + "sni": 7186, + "sni": 35570, + "snickers": 49127, + "sniff": 37841, + "snip": 42954, + "sniper": 22157, + "snippet": 37531, + "snippets": 44001, + "snl": 16011, + "sno": 8567, + "sno": 17802, + "snoo": 11352, + "snooker": 25657, + "snoop": 44503, + "snoop": 27754, + "snoopdogg": 48388, + "snoopy": 41967, + "snooze": 40718, + "snor": 16590, + "snoring": 44560, + "snorkel": 44285, + "snorkeling": 48103, + "snow": 3880, + "snow": 2583, + "snowball": 39254, + "snowboard": 33403, + "snowboarding": 32397, + "snowday": 37982, + "snowden": 32154, + 
"snowdon": 47107, + "snowdonia": 36088, + "snowed": 45073, + "snowfall": 21714, + "snowflake": 33447, + "snowflakes": 38618, + "snowing": 21443, + "snowman": 22668, + "snowstorm": 38777, + "snowy": 14191, + "snp": 15301, + "sns": 36343, + "snsd": 27961, + "snt": 34834, + "snu": 9694, + "snuck": 36522, + "snug": 45169, + "snuggle": 31327, + "snuggles": 48165, + "sny": 17526, + "snyder": 22106, + "snz": 37678, + "so": 759, + "so": 706, + "soa": 39584, + "soak": 24839, + "soaked": 26592, + "soaking": 26750, + "soap": 26086, + "soap": 11088, + "soaps": 40958, + "soar": 48997, + "soar": 22241, + "soaring": 27968, + "soars": 41348, + "sob": 24900, + "sob": 35507, + "sobbing": 36691, + "sober": 30969, + "sober": 24487, + "sobre": 42768, + "sobri": 49308, + "sobs": 43636, + "soc": 3253, + "soc": 7741, + "soca": 49239, + "socal": 46470, + "socal": 20450, + "soccer": 16268, + "soccer": 4233, + "socceroos": 41997, + "socent": 30831, + "sochi": 21014, + "soci": 1720, + "social": 4803, + "social": 2346, + "socialism": 23372, + "socialist": 18450, + "socialists": 43839, + "socially": 24555, + "socialmedi": 23813, + "socialmedia": 9600, + "socialmediamarketing": 31790, + "societal": 40058, + "societies": 25855, + "society": 3757, + "socio": 44319, + "socio": 42790, + "sociology": 32373, + "sock": 29801, + "sock": 18277, + "socket": 28657, + "socks": 8774, + "socorro": 46409, + "socute": 45086, + "sod": 31435, + "soda": 13533, + "sodium": 29070, + "soe": 44136, + "soe": 25498, + "soever": 34024, + "sof": 1571, + "sof": 41187, + "sofa": 15723, + "soff": 35290, + "soff": 30684, + "sofficial": 20563, + "sofi": 41537, + "sofia": 18914, + "sofinstagram": 17301, + "soft": 12778, + "soft": 3773, + "softball": 8369, + "softer": 44462, + "softhe": 23127, + "softly": 34958, + "software": 35941, + "software": 5847, + "softwitter": 11311, + "sog": 44775, + "soggy": 41168, + "sohn": 49267, + "soho": 47749, + "soho": 17592, + "soi": 40495, + "soil": 33417, + "soil": 9216, + "soils": 34891, + "soir": 43427, + "sok": 43456, + "sol": 1175, + "sol": 9941, + "sola": 40086, + "solace": 42567, + "solar": 16990, + "solar": 5199, + "solareclipse": 44727, + "sold": 33116, + "sold": 3939, + "soldi": 5098, + "soldier": 9355, + "soldiers": 7547, + "sole": 10519, + "sole": 8576, + "soleil": 33148, + "solely": 27913, + "solent": 47783, + "soles": 22682, + "soli": 3911, + "solic": 19369, + "solicitor": 45647, + "solicitors": 46000, + "solid": 30626, + "solid": 6148, + "solidar": 10415, + "solidarity": 10983, + "solidi": 46136, + "solids": 49070, + "solihull": 45293, + "solit": 37039, + "solitaire": 47257, + "solitary": 33094, + "solitude": 33199, + "solo": 17626, + "solo": 5797, + "soloist": 46391, + "solom": 15768, + "solomon": 19785, + "solos": 44868, + "solst": 20298, + "solstice": 21359, + "solu": 2487, + "solution": 4575, + "solutions": 5140, + "solve": 8917, + "solved": 13451, + "solves": 42740, + "solving": 15581, + "som": 734, + "som": 10672, + "soma": 36170, + "somal": 40281, + "somali": 26231, + "somalia": 17051, + "somaliland": 43315, + "some": 1132, + "some": 836, + "somebody": 8305, + "someday": 17127, + "somehow": 11735, + "someone": 2100, + "somer": 9656, + "somerhalder": 33990, + "somerset": 14926, + "somerville": 41409, + "somes": 38124, + "somethin": 33541, + "something": 28316, + "something": 2006, + "sometime": 21464, + "sometimes": 4237, + "somewhat": 17864, + "somewhere": 8119, + "somm": 42726, + "somme": 30625, + "sommer": 44954, + "somos": 24951, + "son": 1176, + "son": 825, + "sona": 21249, + "sonam": 40096, + 
"sonar": 48235, + "sonata": 37009, + "sone": 29599, + "song": 6868, + "song": 2295, + "songs": 4641, + "songwriter": 13034, + "songwriters": 39583, + "songwriting": 33567, + "songz": 49302, + "soni": 34899, + "soni": 35911, + "sonia": 20409, + "sonic": 23785, + "sonic": 9132, + "sonics": 48511, + "sonja": 46102, + "sonline": 23412, + "sonny": 43000, + "sonny": 20880, + "sono": 44109, + "sonom": 48596, + "sonoma": 26269, + "sons": 5502, + "sonsof": 46676, + "sont": 31063, + "sonthe": 40923, + "sony": 16042, + "sony": 8748, + "sonya": 39172, + "soo": 5517, + "soo": 8602, + "soom": 39771, + "soon": 27559, + "soon": 1745, + "sooner": 18968, + "sooners": 30449, + "sooo": 11526, + "soooo": 13658, + "sooooo": 21199, + "soooooo": 34859, + "soor": 46698, + "soothe": 44424, + "soothing": 27730, + "sop": 3974, + "sop": 19194, + "soph": 34963, + "sophi": 6192, + "sophia": 16790, + "sophie": 38648, + "sophie": 12357, + "sophistic": 17646, + "sophisticated": 20833, + "sophom": 13696, + "sophomore": 15242, + "sophomores": 47645, + "soprano": 28880, + "soproud": 44479, + "sor": 1852, + "sor": 16872, + "sora": 38719, + "sorbet": 39994, + "sore": 43330, + "sore": 15454, + "sored": 6731, + "soren": 38907, + "sorg": 28152, + "sori": 38588, + "sorority": 30059, + "soros": 33248, + "sorren": 44012, + "sorrow": 28020, + "sorrows": 47924, + "sorry": 25745, + "sorry": 3675, + "sorrynotsorry": 37105, + "sort": 8450, + "sorta": 34700, + "sorted": 13221, + "sorting": 19198, + "sorts": 12577, + "sory": 16257, + "sos": 25145, + "sos": 5792, + "sosa": 45433, + "sosfam": 47709, + "sot": 41542, + "sot": 34116, + "sothe": 32145, + "sotho": 45496, + "soto": 27947, + "sotto": 26047, + "sotu": 32286, + "sou": 1101, + "sou": 24293, + "sought": 18874, + "soul": 8701, + "soul": 3755, + "soulful": 30196, + "soulmate": 38130, + "souls": 10951, + "soun": 19474, + "sound": 5236, + "sound": 3608, + "soundcheck": 31394, + "soundcloud": 15190, + "sounded": 28287, + "sounders": 44933, + "sounding": 21351, + "sounds": 5694, + "soundtrack": 11389, + "soup": 7077, + "soups": 45052, + "sour": 2235, + "sour": 12049, + "source": 23698, + "source": 3634, + "sourced": 23340, + "sources": 5124, + "sourcing": 19574, + "sourdough": 29921, + "souri": 11674, + "sous": 32093, + "sousa": 46296, + "sout": 38156, + "sout": 32732, + "south": 2938, + "south": 2045, + "southafrica": 15184, + "southampton": 15767, + "southbank": 44173, + "southbound": 22932, + "southeast": 13942, + "southeastern": 26813, + "southend": 25583, + "souther": 33330, + "southern": 17704, + "southern": 5036, + "southgate": 47262, + "southkorea": 43552, + "southport": 37446, + "southside": 36436, + "southsudan": 30419, + "southwark": 39098, + "southwe": 46443, + "southwest": 13320, + "southwestern": 30157, + "souven": 20210, + "souvenir": 24811, + "souvenirs": 48460, + "souza": 29424, + "sov": 29737, + "sover": 31876, + "sovere": 17736, + "sovereign": 29418, + "sovereign": 26337, + "sovereignty": 31701, + "soviet": 14274, + "sow": 33089, + "sowe": 36130, + "soweto": 47070, + "sown": 49369, + "sox": 39556, + "sox": 8657, + "soy": 16524, + "soy": 15010, + "soybean": 34606, + "soybeans": 40840, + "soyu": 39578, + "soyuz": 43842, + "sp": 588, + "sp": 4393, + "spa": 7852, + "spa": 6692, + "spac": 10336, + "space": 7857, + "space": 2138, + "spacecraft": 25940, + "spaces": 9006, + "spaceship": 34317, + "spacex": 22511, + "spacey": 48770, + "spacious": 24769, + "spad": 45362, + "spade": 32562, + "spades": 48368, + "spaghetti": 18440, + "spain": 5083, + "spal": 26018, + "spam": 29712, + 
"spam": 14624, + "span": 4270, + "span": 14537, + "spandex": 41686, + "spani": 16721, + "spaniel": 35435, + "spanish": 29966, + "spanish": 6013, + "spann": 25323, + "spanning": 38638, + "spans": 45407, + "spaper": 34548, + "spar": 3378, + "spar": 34576, + "spare": 12615, + "spares": 39505, + "spark": 9555, + "spark": 11047, + "sparked": 32647, + "sparkle": 18287, + "sparkles": 36410, + "sparkling": 17893, + "sparkly": 30542, + "sparks": 15046, + "sparky": 47198, + "sparring": 42161, + "sparrow": 22888, + "spart": 10143, + "sparta": 38401, + "spartan": 26582, + "spartan": 24225, + "spartans": 20457, + "sparty": 36477, + "spas": 31714, + "spati": 19200, + "spatial": 22022, + "spaw": 31605, + "spawn": 29166, + "spay": 40634, + "spc": 20492, + "spca": 37018, + "spd": 37717, + "spd": 28307, + "spdwy": 45981, + "spe": 876, + "spe": 36676, + "speak": 20599, + "speak": 4208, + "speake": 46077, + "speaker": 25764, + "speaker": 4914, + "speakers": 7675, + "speaking": 3714, + "speaks": 5661, + "spear": 23277, + "spear": 30420, + "speare": 43859, + "spears": 20242, + "spec": 1711, + "spec": 18596, + "speci": 1969, + "special": 11422, + "special": 1689, + "specialist": 10630, + "specialists": 21719, + "speciality": 46904, + "specialized": 23265, + "specializes": 48533, + "specially": 4513, + "specials": 11983, + "specialty": 18262, + "species": 6330, + "specific": 10528, + "specifically": 17174, + "specification": 46394, + "specifications": 39705, + "specified": 48114, + "specimen": 30263, + "specimens": 42715, + "specs": 24093, + "spect": 3416, + "spectac": 7242, + "spectacle": 34342, + "spectacular": 8404, + "spectator": 32372, + "spectators": 39306, + "spective": 6633, + "spector": 48676, + "spectral": 45441, + "spectre": 35998, + "spectro": 27646, + "spectrum": 13532, + "specul": 19209, + "speculation": 30898, + "sped": 38813, + "spee": 4050, + "speech": 19556, + "speech": 4902, + "speeches": 25208, + "speechless": 23152, + "speed": 6860, + "speed": 4163, + "speeding": 27264, + "speeds": 22017, + "speedway": 11480, + "speedy": 21603, + "spel": 41887, + "spell": 22784, + "spell": 11230, + "spelled": 24339, + "spelling": 15614, + "spells": 25335, + "spelt": 38316, + "spen": 5087, + "spence": 33324, + "spencer": 27509, + "spencer": 10678, + "spend": 4664, + "spending": 5961, + "spends": 22508, + "spent": 4429, + "speople": 33035, + "sper": 8213, + "sper": 15313, + "sperm": 35781, + "sperson": 22687, + "spf": 34973, + "spg": 34623, + "sph": 28909, + "sph": 24684, + "sphe": 33691, + "spher": 18349, + "sphere": 6987, + "spheres": 37478, + "spheric": 21744, + "sphin": 39237, + "sphinx": 46487, + "spho": 20442, + "sphoto": 38594, + "sphy": 43808, + "spi": 3174, + "spi": 37080, + "spic": 17264, + "spice": 29761, + "spice": 10141, + "spiced": 24267, + "spicer": 37627, + "spices": 21194, + "spicy": 10915, + "spide": 36801, + "spider": 11963, + "spider": 7622, + "spiderman": 39808, + "spiderman": 18427, + "spiders": 23141, + "spidey": 41706, + "spie": 28573, + "spie": 28746, + "spied": 43998, + "spiegel": 45351, + "spiel": 28435, + "spiel": 37690, + "spielberg": 37569, + "spies": 25374, + "spieth": 43254, + "spike": 35306, + "spike": 15310, + "spiked": 47014, + "spikes": 29582, + "spil": 47765, + "spill": 43933, + "spill": 18006, + "spilled": 33206, + "spilling": 49006, + "spills": 35796, + "spin": 6288, + "spin": 9226, + "spinach": 14747, + "spinal": 23925, + "spine": 48221, + "spine": 19646, + "sping": 47113, + "spinner": 29924, + "spinning": 13987, + "spino": 40848, + "spinoff": 42513, + "spinrilla": 46064, 
+ "spins": 27243, + "spion": 39604, + "spionage": 41838, + "spir": 3745, + "spiral": 19873, + "spiration": 38126, + "spire": 27439, + "spired": 40650, + "spires": 46938, + "spiri": 4024, + "spirit": 18224, + "spirit": 4071, + "spirited": 34701, + "spirits": 13192, + "spiritu": 7237, + "spiritual": 46076, + "spiritual": 9473, + "spirituality": 22165, + "spiro": 40085, + "spit": 18115, + "spit": 23177, + "spite": 26060, + "spitfire": 31126, + "spitting": 40721, + "spl": 2470, + "spl": 33052, + "spla": 4809, + "splac": 16059, + "splace": 38743, + "splash": 43641, + "splash": 11879, + "splat": 15733, + "splatoon": 22565, + "splay": 3169, + "splen": 18552, + "splend": 29861, + "splendid": 21016, + "splendor": 46262, + "splin": 38090, + "split": 25443, + "split": 9109, + "splits": 34897, + "splitting": 37210, + "splus": 40866, + "spn": 35467, + "spn": 19414, + "spnfamily": 38566, + "spo": 1261, + "spo": 21085, + "spock": 43918, + "spoil": 25600, + "spoiled": 21399, + "spoiler": 16512, + "spoilers": 18326, + "spoils": 42436, + "spoilt": 35358, + "spokane": 24528, + "spoke": 13890, + "spoke": 6518, + "spoken": 12979, + "spokesman": 31632, + "spokesperson": 26234, + "spol": 22476, + "spol": 8132, + "spoli": 34301, + "spolice": 37406, + "spon": 1715, + "spon": 48216, + "sponge": 22861, + "sponge": 24345, + "spongebob": 25089, + "spons": 5597, + "sponsor": 10424, + "sponsor": 7574, + "sponsored": 7197, + "sponsoring": 16181, + "sponsors": 11005, + "sponsorship": 17632, + "spontaneous": 32465, + "spoo": 11248, + "spooky": 15369, + "spool": 49152, + "spoon": 27001, + "spoon": 14024, + "spoons": 29661, + "spor": 1475, + "spor": 33746, + "sport": 4379, + "sport": 2364, + "sporting": 32620, + "sporting": 8944, + "sports": 6436, + "sports": 2054, + "sportsc": 40114, + "sportscar": 46931, + "sportscenter": 39157, + "sportsman": 39020, + "sportsmanship": 34858, + "sportsnet": 34144, + "sportswear": 39747, + "sporty": 33346, + "spot": 3223, + "spot": 3049, + "spotify": 7193, + "spotlight": 7901, + "spots": 7670, + "spotted": 4533, + "spotter": 30742, + "spotting": 15885, + "spouse": 24724, + "spout": 48993, + "spp": 47567, + "spr": 1536, + "spr": 19417, + "spra": 12966, + "spraw": 46590, + "spray": 37885, + "spray": 10449, + "sprayed": 40022, + "spraying": 39224, + "spre": 18740, + "spread": 20620, + "spread": 5284, + "spreading": 11821, + "spreads": 27579, + "spree": 21851, + "spri": 35498, + "spride": 26685, + "spring": 5166, + "spring": 2420, + "springbreak": 37753, + "springer": 30117, + "springfield": 16599, + "springs": 7308, + "springst": 32132, + "springsteen": 28367, + "springtime": 28285, + "springtraining": 49364, + "springwatch": 29239, + "sprink": 15817, + "sprinkle": 42897, + "sprinkler": 48754, + "sprinkles": 37326, + "sprint": 29248, + "sprint": 10751, + "sprinter": 36947, + "sprints": 36404, + "sprite": 32544, + "spro": 13902, + "spro": 37403, + "sproject": 37802, + "sproud": 37686, + "sprout": 35863, + "sprouts": 25756, + "spru": 17041, + "spruce": 23812, + "sprung": 32968, + "sps": 13869, + "spu": 23566, + "spun": 47922, + "spun": 32852, + "spur": 15206, + "spur": 20361, + "spurs": 10916, + "spursofficial": 45290, + "sput": 47521, + "spx": 20584, + "spy": 13861, + "spy": 6656, + "spyder": 39952, + "spying": 36227, + "sq": 9370, + "sq": 11590, + "sqft": 41912, + "sql": 42759, + "sql": 18938, + "sqm": 47978, + "sqn": 41209, + "squ": 1653, + "squad": 13892, + "squad": 4234, + "squadron": 18579, + "squads": 36590, + "square": 19314, + "square": 3999, + "squared": 32967, + "squares": 26972, + 
"squash": 13312, + "squat": 44628, + "squat": 30680, + "squats": 40213, + "sque": 9721, + "sque": 8097, + "squee": 14420, + "squeeze": 21684, + "squeezed": 40413, + "squid": 42057, + "squid": 22553, + "squir": 9683, + "squire": 48090, + "squirrel": 14004, + "squirrels": 26623, + "squish": 42607, + "squishy": 47001, + "sr": 3437, + "sr": 5428, + "srbachchan": 32353, + "src": 23445, + "sre": 17748, + "sri": 11051, + "sri": 9276, + "sridevi": 46301, + "srilan": 15559, + "srilanka": 16922, + "srin": 26818, + "srinagar": 33671, + "srini": 41899, + "sriracha": 42743, + "sris": 27851, + "srisri": 32966, + "srk": 44982, + "srk": 11216, + "srl": 33808, + "srp": 43004, + "srs": 41764, + "srsly": 44179, + "srt": 28139, + "sru": 44152, + "srugby": 40526, + "ss": 690, + "ss": 632, + "ssa": 6088, + "ssal": 31330, + "ssal": 35936, + "ssb": 37511, + "ssc": 21692, + "ssc": 20364, + "ssd": 23107, + "sse": 9030, + "sse": 8938, + "ssed": 38755, + "ssed": 1804, + "ssel": 17402, + "ssel": 19373, + "sseldorf": 47792, + "ssell": 42388, + "ssels": 8355, + "ssen": 39408, + "ssen": 22645, + "sser": 20445, + "sses": 1802, + "ssett": 44103, + "ssf": 33239, + "ssg": 40707, + "ssh": 48866, + "ssi": 834, + "ssi": 14953, + "ssia": 22238, + "ssian": 31218, + "ssible": 47099, + "ssic": 27774, + "ssic": 17077, + "ssie": 7572, + "ssier": 26422, + "ssil": 15026, + "ssin": 42660, + "ssing": 2112, + "ssion": 16050, + "ssion": 1627, + "ssional": 13727, + "ssionism": 24787, + "ssionist": 27682, + "ssions": 4137, + "ssive": 2734, + "ssively": 28060, + "ssl": 32195, + "ssler": 30287, + "ssly": 24904, + "ssn": 39116, + "ssnhq": 47998, + "sso": 25900, + "sso": 7914, + "ssoccer": 32546, + "sson": 36124, + "sson": 7271, + "ssor": 35152, + "ssp": 31101, + "ssr": 39880, + "sss": 11176, + "ssss": 30676, + "ssss": 15880, + "sssss": 24298, + "sst": 40396, + "ssu": 35351, + "ssummit": 49301, + "ssus": 31286, + "ssw": 36937, + "ssy": 22519, + "ssy": 8661, + "st": 522, + "st": 545, + "sta": 1363, + "sta": 2745, + "stab": 7726, + "stab": 29974, + "stabbed": 24534, + "stabbing": 25474, + "stabil": 42576, + "stabili": 23903, + "stability": 16716, + "stable": 44427, + "stable": 10492, + "stables": 34218, + "stac": 10175, + "stacey": 41653, + "stacey": 24262, + "stache": 23616, + "stack": 24723, + "stack": 11257, + "stacked": 24990, + "stacking": 39836, + "stacks": 24734, + "stacy": 26628, + "stad": 15832, + "stad": 16485, + "stade": 38198, + "stadi": 26587, + "stadion": 48815, + "stadium": 3390, + "stadiums": 38852, + "stadt": 22713, + "staf": 2367, + "staff": 31188, + "staff": 2813, + "staffer": 38494, + "staffers": 44994, + "staffing": 32932, + "stafford": 25006, + "staffordshire": 29198, + "staffs": 36098, + "stag": 12088, + "stag": 20277, + "stage": 23182, + "stage": 2170, + "staged": 19906, + "stages": 12297, + "staggering": 37315, + "staging": 27026, + "stagram": 19503, + "stags": 45936, + "stain": 3933, + "stain": 14603, + "stained": 13751, + "staining": 32523, + "stainless": 12320, + "stains": 32008, + "stair": 7240, + "stair": 17662, + "staircase": 22777, + "stairs": 9577, + "stairway": 45559, + "stak": 39144, + "stake": 15955, + "stake": 7937, + "stakeholder": 39122, + "stakeholders": 22968, + "stakes": 7519, + "staking": 47082, + "stal": 3861, + "stal": 5535, + "stale": 42471, + "stalert": 25450, + "stalin": 28346, + "stalk": 40826, + "stalk": 14878, + "stalker": 26777, + "stalking": 24721, + "stalks": 45886, + "stall": 24636, + "stall": 12058, + "stalled": 40362, + "stallion": 28273, + "stallions": 44787, + "stallone": 40969, + 
"stalls": 25427, + "stam": 4663, + "stamatic": 30904, + "stamford": 27843, + "stamina": 48753, + "stamp": 28694, + "stamp": 12771, + "stampcollecting": 42852, + "stamped": 38356, + "stampede": 25384, + "stamps": 13827, + "stan": 2203, + "stan": 2434, + "stana": 33311, + "stanbul": 11231, + "stance": 48900, + "stance": 3542, + "stances": 15054, + "stand": 1819, + "stand": 2087, + "standalone": 44887, + "standard": 35780, + "standard": 5807, + "standardi": 30247, + "standards": 9022, + "standby": 36184, + "standing": 39934, + "standing": 2862, + "standings": 19835, + "standoff": 31821, + "standout": 23131, + "standre": 48309, + "stands": 6446, + "standup": 35108, + "standup": 24964, + "standwith": 19540, + "stanford": 36219, + "stanford": 15087, + "stang": 12536, + "stani": 38228, + "stanis": 37711, + "stanley": 19048, + "stanley": 10079, + "stanleycup": 28662, + "stans": 26564, + "stant": 41576, + "stant": 4906, + "stanton": 25400, + "stap": 10438, + "staple": 22695, + "staples": 23646, + "stapleton": 45228, + "star": 993, + "star": 1565, + "starbuck": 48519, + "starbucks": 9499, + "starch": 47837, + "starcraft": 48871, + "stardom": 44616, + "stardust": 34337, + "stare": 18094, + "stared": 47772, + "stares": 37916, + "starfish": 44283, + "stargate": 41099, + "stargazing": 49328, + "staring": 13800, + "stark": 40446, + "stark": 15353, + "starlight": 32197, + "starling": 46205, + "starmagic": 48023, + "starplus": 37815, + "starr": 19186, + "starred": 24180, + "starrer": 41311, + "starring": 6660, + "starry": 30963, + "stars": 2895, + "starship": 37166, + "start": 17466, + "start": 1572, + "started": 2760, + "starter": 7800, + "starters": 22222, + "starting": 2530, + "startrek": 30642, + "startrek": 15349, + "starts": 3105, + "startu": 6996, + "startup": 18049, + "startup": 5882, + "startups": 9056, + "starve": 46957, + "starving": 30473, + "starwar": 17287, + "starwars": 26239, + "starwars": 7887, + "starz": 25928, + "stas": 19866, + "stash": 27711, + "stasy": 45942, + "stat": 3004, + "stat": 15216, + "state": 3492, + "state": 1295, + "statec": 33931, + "stated": 19629, + "statedept": 41458, + "statefair": 40305, + "statement": 5401, + "statements": 19513, + "staten": 38263, + "stateof": 35195, + "states": 22125, + "states": 4218, + "statesman": 35301, + "stateu": 44248, + "statewide": 29561, + "stati": 9622, + "static": 16363, + "stating": 35147, + "station": 13498, + "station": 2631, + "stationary": 29493, + "stationed": 47618, + "stationery": 33851, + "stations": 10051, + "statistical": 29349, + "statistics": 14165, + "stats": 7294, + "statu": 32481, + "statue": 8222, + "statues": 24363, + "status": 6414, + "stau": 28550, + "staur": 3709, + "stav": 20285, + "stax": 32235, + "stay": 4714, + "stay": 2277, + "stayed": 13805, + "staying": 8993, + "stays": 13311, + "staytuned": 39285, + "stc": 29859, + "std": 30477, + "ste": 795, + "ste": 2686, + "stea": 46614, + "stead": 16101, + "stead": 11031, + "steadily": 35049, + "steady": 12937, + "steak": 26955, + "steak": 8913, + "steakhouse": 35031, + "steaks": 30655, + "steal": 37070, + "steal": 10181, + "stealing": 14242, + "steals": 20224, + "stealth": 25327, + "steam": 10962, + "steam": 6972, + "steamboat": 41121, + "steamed": 29007, + "steamer": 49075, + "steaming": 43746, + "steampunk": 24130, + "steamy": 43104, + "stec": 46713, + "stech": 48949, + "stech": 32455, + "sted": 20426, + "sted": 1356, + "stee": 31793, + "steed": 48293, + "steel": 6938, + "steel": 4726, + "steele": 19460, + "steelers": 14430, + "steen": 42851, + "steen": 18625, + 
"steep": 28648, + "steep": 20714, + "steer": 27612, + "steering": 19833, + "stef": 29158, + "stefan": 15004, + "stefan": 18829, + "stefani": 38319, + "stefano": 30719, + "steff": 30075, + "stein": 13653, + "stein": 5818, + "steiner": 36314, + "stel": 9102, + "stel": 10798, + "stell": 22355, + "stella": 46178, + "stella": 17869, + "stellar": 13810, + "stellen": 42754, + "stem": 24342, + "stem": 6761, + "stemc": 40486, + "stems": 31503, + "sten": 7652, + "sten": 7877, + "stencil": 47854, + "stennis": 45636, + "step": 15572, + "step": 3348, + "steph": 3522, + "steph": 16251, + "stephan": 37312, + "stephani": 48121, + "stephanie": 14361, + "stephen": 10421, + "stephen": 6078, + "stephenking": 46361, + "stephens": 22256, + "stephenson": 37280, + "stepped": 18384, + "stepping": 15906, + "steps": 5408, + "ster": 1022, + "ster": 881, + "stere": 9229, + "stered": 6935, + "stereo": 15992, + "stereo": 17400, + "stereotypes": 27890, + "steria": 38804, + "stering": 14175, + "sterling": 45790, + "sterling": 9378, + "stern": 36254, + "stern": 2945, + "steroids": 37670, + "sterone": 39418, + "sters": 2132, + "stery": 24232, + "stest": 8556, + "stev": 11640, + "steve": 7412, + "steve": 3803, + "steven": 10973, + "steven": 8016, + "stevens": 13877, + "stevenson": 25091, + "stevie": 42104, + "stevie": 18969, + "stew": 17906, + "stewar": 28453, + "steward": 34980, + "steward": 43355, + "stewards": 49294, + "stewardship": 36720, + "stewart": 8120, + "stfu": 47000, + "stg": 48387, + "stgeorge": 43698, + "sth": 13456, + "sth": 34004, + "sthe": 16491, + "sthel": 42863, + "sti": 860, + "sti": 12439, + "stia": 26492, + "stible": 25835, + "stic": 5868, + "stic": 1561, + "stical": 16660, + "stically": 19041, + "stick": 5483, + "stick": 4987, + "sticker": 11270, + "stickers": 11613, + "sticking": 21021, + "sticks": 10016, + "sticky": 18887, + "stics": 5449, + "stie": 38164, + "stie": 11000, + "stier": 42069, + "sties": 16428, + "stiff": 43471, + "stiff": 21441, + "stig": 4088, + "stig": 42551, + "stigate": 15390, + "stigma": 20619, + "stik": 42247, + "stil": 21790, + "stil": 37519, + "stiles": 33028, + "still": 13209, + "still": 1170, + "stills": 20259, + "stim": 18269, + "stime": 24711, + "stimul": 16434, + "stimulate": 42380, + "stimulating": 41237, + "stimulation": 39530, + "stimulus": 47283, + "stin": 2588, + "stin": 4025, + "stina": 22359, + "stine": 7098, + "sting": 19868, + "sting": 1271, + "stingly": 49332, + "stingray": 43229, + "stink": 38213, + "stinky": 44957, + "stino": 40658, + "stint": 33531, + "stion": 10812, + "stip": 39869, + "stips": 44756, + "stique": 43305, + "stir": 12416, + "stir": 19564, + "stirling": 23128, + "stirring": 39205, + "stis": 45224, + "stit": 14110, + "stitch": 30003, + "stitch": 14771, + "stitched": 36540, + "stitcher": 48204, + "stitches": 32360, + "stitching": 45208, + "stitu": 14585, + "stitutes": 40479, + "stive": 22426, + "stix": 48829, + "stjohn": 36153, + "stl": 14179, + "stl": 12527, + "stlblues": 44138, + "stlcards": 28644, + "stle": 7698, + "stles": 48638, + "stlouis": 40358, + "stlouis": 39516, + "stm": 28333, + "stn": 27175, + "sto": 928, + "sto": 5723, + "stock": 5899, + "stock": 3206, + "stocked": 23552, + "stockholm": 16024, + "stocki": 42944, + "stocking": 17335, + "stockings": 28040, + "stockmarket": 40359, + "stockport": 35569, + "stocks": 9321, + "stockton": 26130, + "stoday": 22392, + "stok": 43782, + "stoke": 31338, + "stoke": 13550, + "stoked": 13160, + "stokes": 27512, + "stol": 11401, + "stol": 6700, + "stole": 10995, + "stolen": 8704, + "stolic": 45020, + 
"stom": 2343, + "stom": 38068, + "stoma": 43545, + "stomach": 14722, + "stomp": 40165, + "stomping": 46144, + "ston": 4101, + "ston": 1839, + "stone": 7694, + "stone": 2441, + "stoned": 36248, + "stonehenge": 42417, + "stoner": 35131, + "stoner": 29115, + "stones": 42659, + "stones": 6885, + "stonewall": 39688, + "stoney": 44198, + "stony": 41717, + "stony": 35691, + "stoo": 24505, + "stood": 9151, + "stool": 34413, + "stool": 22314, + "stop": 6005, + "stop": 1691, + "stopbrexit": 48680, + "stopp": 15738, + "stopped": 6015, + "stopper": 32147, + "stoppers": 34457, + "stopping": 10735, + "stops": 9822, + "stopthe": 26463, + "stor": 809, + "stor": 17740, + "storage": 6824, + "store": 17769, + "store": 2183, + "stored": 28257, + "stores": 6370, + "storey": 24025, + "storians": 34628, + "stories": 3784, + "storing": 40087, + "stork": 46452, + "storm": 7434, + "storm": 2819, + "stormed": 45939, + "stormhour": 12161, + "storming": 24842, + "storms": 6464, + "stormtrooper": 49218, + "stormy": 20075, + "stors": 7178, + "story": 6512, + "story": 1134, + "storyline": 37079, + "storymonth": 23717, + "storyteller": 35882, + "storytelling": 14457, + "storytime": 44197, + "stos": 19281, + "stou": 37168, + "stour": 37361, + "stour": 21928, + "stout": 16550, + "stove": 21423, + "stow": 44284, + "stow": 17046, + "stowe": 34196, + "stown": 28071, + "stown": 7939, + "stp": 30576, + "stpatrick": 21343, + "stpatricksday": 22747, + "str": 807, + "str": 15913, + "stra": 1894, + "stra": 6253, + "strack": 46861, + "strada": 31134, + "strade": 48968, + "straigh": 31016, + "straight": 22114, + "straight": 4241, + "strain": 16887, + "strains": 38067, + "strait": 22946, + "straits": 41984, + "stral": 23289, + "stralia": 42510, + "stran": 18411, + "strand": 18214, + "strand": 17826, + "stranded": 22975, + "strang": 11138, + "strange": 33380, + "strange": 7288, + "strangely": 37566, + "stranger": 35541, + "stranger": 14149, + "strangers": 20684, + "strangerthings": 43271, + "strangest": 46740, + "strap": 13946, + "strapped": 40922, + "straps": 31213, + "stras": 36814, + "stras": 42125, + "strasbourg": 39576, + "strat": 11345, + "strat": 32925, + "strata": 47278, + "strate": 3532, + "strate": 28758, + "strategi": 49102, + "strategic": 10246, + "strategically": 45706, + "strategies": 9942, + "strategist": 37180, + "strategy": 5637, + "strates": 45724, + "stratford": 23955, + "strath": 21997, + "stration": 3156, + "strato": 28878, + "strauss": 32033, + "strava": 34625, + "stravel": 43494, + "straw": 7430, + "straw": 16438, + "strawberries": 17796, + "strawberry": 10233, + "straws": 33048, + "stray": 30784, + "stray": 15712, + "stre": 1079, + "stre": 19652, + "stread": 27797, + "streak": 11749, + "streaks": 42092, + "stream": 8659, + "stream": 3322, + "streamed": 26280, + "streamer": 25178, + "streamers": 19937, + "streaming": 6278, + "streamline": 44917, + "streams": 13545, + "stree": 35082, + "stree": 32438, + "streep": 38701, + "street": 4839, + "street": 2012, + "streetart": 12948, + "streetcar": 34268, + "streetfood": 44486, + "streetphotography": 20786, + "streets": 6058, + "streetstyle": 39118, + "streetwear": 37298, + "strel": 39685, + "stren": 4349, + "streng": 4472, + "strength": 15475, + "strength": 5959, + "strengthen": 16318, + "strengthened": 47131, + "strengthening": 23475, + "strengthens": 40280, + "strengths": 29268, + "stress": 17297, + "stress": 5843, + "stressed": 16497, + "stresses": 32112, + "stressful": 24268, + "stressing": 35917, + "stret": 12265, + "stretch": 10064, + "stretched": 29393, + 
"stretches": 32231, + "stretching": 24423, + "stri": 1493, + "stri": 27795, + "stria": 39620, + "strial": 30217, + "strian": 12924, + "stric": 2607, + "strick": 25181, + "strickland": 48939, + "strict": 21585, + "strictly": 16475, + "stride": 36024, + "strides": 37355, + "stries": 18171, + "strife": 46473, + "strike": 20774, + "strike": 5767, + "striker": 12448, + "strikers": 33465, + "strikes": 9280, + "striking": 13392, + "string": 25512, + "string": 9696, + "strings": 15699, + "strip": 9317, + "stripe": 19368, + "striped": 22192, + "stripes": 14239, + "stripped": 26602, + "stripper": 45759, + "stripping": 48588, + "strips": 19000, + "strive": 22140, + "striving": 37671, + "stro": 3121, + "stro": 6186, + "stroke": 44621, + "stroke": 10403, + "strokes": 26595, + "strol": 30123, + "stroll": 15924, + "stroller": 47076, + "strolling": 40911, + "strom": 14707, + "stron": 4165, + "strong": 10436, + "strong": 2389, + "stronger": 27760, + "stronger": 9245, + "strongertogether": 38532, + "strongest": 16171, + "strongh": 38678, + "strongly": 15507, + "strophy": 47912, + "strou": 48425, + "stroud": 39895, + "strous": 23752, + "stru": 1666, + "struc": 3311, + "struck": 10861, + "struction": 12497, + "structural": 16899, + "structure": 5285, + "structured": 27147, + "structures": 14171, + "structuring": 37496, + "strugg": 5176, + "struggle": 8443, + "struggled": 32921, + "struggles": 17446, + "struggling": 12135, + "struly": 34118, + "strum": 37632, + "strung": 46033, + "strust": 23920, + "strut": 48375, + "stry": 17325, + "stry": 2245, + "sts": 1088, + "stu": 858, + "stu": 23531, + "stuart": 32054, + "stuart": 11723, + "stub": 27066, + "stubborn": 38955, + "stuck": 6596, + "stud": 22368, + "stud": 13319, + "studded": 29153, + "studen": 44156, + "student": 14681, + "student": 2556, + "students": 1712, + "studi": 5691, + "studied": 21369, + "studies": 6426, + "studio": 17798, + "studio": 3155, + "studios": 6231, + "studs": 27571, + "study": 21051, + "study": 3123, + "studyabroad": 45425, + "studying": 8826, + "stuff": 46072, + "stuff": 3487, + "stuffed": 11781, + "stuffing": 31612, + "stuffs": 43455, + "stuk": 32424, + "stumb": 16784, + "stumble": 39045, + "stumbled": 21776, + "stump": 32064, + "stun": 3088, + "stun": 37959, + "stunned": 34034, + "stunner": 29965, + "stunning": 3769, + "stunningly": 47515, + "stuns": 43796, + "stunt": 19905, + "stunts": 40118, + "stupi": 18975, + "stupid": 42600, + "stupid": 8085, + "stupidity": 33766, + "stur": 10676, + "sturdy": 43780, + "stures": 27223, + "sturgeon": 31580, + "sturi": 21747, + "sturridge": 45331, + "stutt": 30444, + "stuttgart": 32219, + "stv": 27060, + "stv": 9708, + "stweet": 46832, + "stweets": 39174, + "stx": 42548, + "sty": 1421, + "sty": 2920, + "style": 12356, + "style": 1844, + "styled": 17974, + "styles": 6948, + "styli": 38577, + "styling": 14597, + "stylish": 10378, + "stylist": 15928, + "styn": 41394, + "su": 605, + "su": 2937, + "sua": 42448, + "suarez": 21437, + "suave": 47305, + "sub": 1783, + "sub": 7765, + "subaru": 21319, + "subjec": 16090, + "subject": 10300, + "subjects": 22099, + "subli": 16350, + "sublime": 22367, + "submarine": 19968, + "submer": 27156, + "submerged": 43171, + "submission": 16571, + "submissions": 21566, + "submit": 10423, + "submitted": 15189, + "submitting": 38788, + "subram": 49207, + "subs": 16398, + "subscri": 5838, + "subscribe": 9839, + "subscribed": 44867, + "subscriber": 36292, + "subscribers": 17337, + "subscription": 17979, + "subscriptions": 47162, + "subsequ": 33598, + "subsequent": 44323, + 
"subsi": 14856, + "subsidi": 45029, + "subsidiary": 45506, + "subsidies": 37685, + "subsidy": 47462, + "substan": 17487, + "substance": 19309, + "substances": 36834, + "substantial": 27171, + "substantially": 47577, + "substitu": 18529, + "substitute": 25340, + "subtitles": 39479, + "subtle": 16536, + "subur": 12517, + "suburb": 37664, + "suburban": 23570, + "suburbs": 25317, + "subway": 12196, + "suc": 1869, + "succe": 7981, + "succeed": 13556, + "succeeded": 41077, + "succes": 39019, + "success": 3695, + "success": 3034, + "successes": 29436, + "successful": 4670, + "successfully": 9934, + "succession": 38491, + "successive": 41319, + "successor": 34774, + "succu": 45253, + "succul": 25671, + "succulent": 35236, + "such": 2046, + "suction": 42786, + "sud": 8067, + "sud": 33714, + "sudan": 31149, + "sudan": 13474, + "sudanese": 42837, + "sudbury": 32488, + "sudden": 10833, + "sudden": 15433, + "suddenly": 11076, + "sue": 14045, + "sue": 6641, + "sued": 22225, + "suede": 21036, + "sues": 17105, + "suf": 21204, + "suf": 22579, + "sufc": 37091, + "suff": 4866, + "suffe": 13510, + "suffer": 13557, + "suffered": 14766, + "suffering": 10140, + "suffers": 22389, + "sufficient": 28410, + "suffol": 13775, + "suffolk": 46408, + "suffolk": 15685, + "suffra": 34596, + "suffrage": 39567, + "sufi": 39756, + "sug": 3189, + "suga": 28757, + "sugar": 12418, + "sugar": 5574, + "sugge": 6345, + "suggest": 13356, + "suggested": 18790, + "suggesti": 15033, + "suggesting": 29792, + "suggestion": 23741, + "suggestions": 16052, + "suggests": 13333, + "suho": 32744, + "sui": 24972, + "suici": 16372, + "suicidal": 37165, + "suicide": 31310, + "suicide": 8247, + "suing": 18309, + "suisse": 35964, + "suit": 11887, + "suit": 3940, + "suitable": 17476, + "suitcase": 27792, + "suite": 9346, + "suited": 25919, + "suites": 21523, + "suits": 9949, + "suk": 24820, + "suk": 6886, + "suka": 44017, + "suke": 25590, + "sukh": 46961, + "suki": 32704, + "sul": 1767, + "sul": 19879, + "sula": 34713, + "sula": 26143, + "sullivan": 14477, + "sully": 37752, + "sulph": 37234, + "sulphur": 47659, + "sultan": 35650, + "sultan": 17049, + "sum": 7054, + "sum": 8257, + "suma": 47938, + "sumat": 32640, + "sumatra": 47346, + "sume": 45457, + "sumi": 41248, + "summ": 1309, + "summar": 34657, + "summari": 31993, + "summary": 13435, + "summed": 34912, + "summer": 5500, + "summer": 1673, + "summers": 18254, + "summerslam": 40264, + "summertime": 19025, + "summit": 30011, + "summit": 3768, + "summon": 27622, + "summon": 39782, + "sumner": 46813, + "sumo": 33734, + "sump": 34252, + "sumptuous": 47354, + "sums": 13325, + "sun": 968, + "sun": 2176, + "sunbathing": 46994, + "sunburn": 45767, + "sund": 40735, + "sundae": 38078, + "sundance": 24128, + "sundar": 44936, + "sunday": 6649, + "sunday": 1706, + "sundayfunday": 21565, + "sundaymorning": 24809, + "sundaymotivation": 46227, + "sundays": 15827, + "sundaywith": 26469, + "sundaywithmarsha": 26662, + "sunder": 15097, + "sunderland": 45727, + "sunderland": 18851, + "sundown": 44438, + "sune": 41096, + "sunflower": 21559, + "sunflowers": 39809, + "sung": 16903, + "sung": 6047, + "sunglasses": 12906, + "suni": 17663, + "suni": 47010, + "sunil": 32861, + "sunite": 21382, + "sunited": 35276, + "sunk": 37534, + "sunken": 43473, + "sunlight": 17996, + "sunni": 44315, + "sunny": 15632, + "sunny": 5438, + "sunrise": 5610, + "suns": 18322, + "sunscreen": 29355, + "sunset": 37880, + "sunset": 3424, + "sunsets": 17721, + "sunshine": 32761, + "sunshine": 5385, + "suny": 41308, + "sup": 19078, + "sup": 8249, + 
"supdates": 24177, + "super": 1642, + "super": 1994, + "superb": 8930, + "superbike": 45709, + "superbowl": 47461, + "superbowl": 16467, + "supercar": 27021, + "supercars": 32185, + "supercell": 43227, + "supercharged": 47479, + "supere": 46831, + "superfood": 41715, + "supergirl": 25771, + "superhero": 14049, + "superheroes": 23334, + "superint": 17615, + "superintendent": 19020, + "superior": 13205, + "superjunior": 40475, + "superleague": 45539, + "superman": 11237, + "supermarket": 19897, + "supermarkets": 45106, + "supermodel": 41963, + "supermoon": 36571, + "supernatural": 15484, + "supernova": 39843, + "superrugby": 48717, + "supersonic": 42019, + "supersport": 46319, + "superst": 38202, + "superstar": 32551, + "superstar": 10472, + "superstars": 25797, + "supervis": 12709, + "supervised": 41316, + "supervision": 36234, + "supervisor": 20366, + "supervisors": 37958, + "superyacht": 42714, + "supp": 1023, + "supper": 15727, + "supple": 31431, + "supplement": 19924, + "supplements": 21265, + "supplied": 24106, + "supplier": 18043, + "suppliers": 24196, + "supplies": 9384, + "supply": 25074, + "supply": 6389, + "supplychain": 31224, + "supplying": 32739, + "suppo": 6941, + "suppor": 2104, + "support": 12062, + "support": 1425, + "supported": 8038, + "supporter": 12992, + "supporters": 7403, + "supportindiefilm": 43976, + "supporting": 3976, + "supportive": 18313, + "supportlocal": 43852, + "supports": 8336, + "supportsmall": 30941, + "supportsmallstreamers": 36097, + "suppose": 18924, + "supposed": 9119, + "supposedly": 32302, + "suppre": 20542, + "suppression": 36508, + "supra": 48485, + "supre": 5875, + "supremac": 28643, + "supremacist": 39005, + "supremacy": 28913, + "supreme": 35222, + "supreme": 7468, + "supt": 23625, + "sur": 1090, + "sur": 7123, + "sura": 33412, + "sura": 49125, + "surabaya": 45227, + "surance": 22184, + "surat": 30201, + "sure": 14320, + "sure": 1650, + "sured": 36869, + "surely": 11409, + "sures": 12725, + "suresh": 32118, + "suresh": 31464, + "sureshpp": 41924, + "sureshpprabhu": 42050, + "surf": 10176, + "surf": 10322, + "surface": 7744, + "surfaces": 20746, + "surfer": 24925, + "surfers": 34842, + "surfing": 15762, + "surg": 13045, + "surge": 17457, + "surgeon": 16039, + "surgeons": 26000, + "surger": 5122, + "surgeries": 34940, + "surgery": 5344, + "surgical": 16386, + "suri": 14130, + "suri": 33952, + "suring": 16817, + "suriya": 17832, + "surpass": 45494, + "surpassed": 25648, + "surplus": 29413, + "surpri": 3244, + "surprise": 5099, + "surprised": 8949, + "surprises": 16920, + "surprising": 14964, + "surprisingly": 17367, + "surreal": 18408, + "surrealism": 41773, + "surrender": 20964, + "surrendered": 44601, + "surrey": 26489, + "surrey": 14315, + "surro": 47499, + "surroun": 8250, + "surround": 26543, + "surround": 22999, + "surrounded": 13589, + "surrounding": 12544, + "surroundings": 26915, + "surrounds": 39012, + "suru": 49240, + "surve": 8952, + "surveill": 15408, + "surveillance": 15578, + "survey": 45914, + "survey": 6809, + "surveying": 33085, + "surveys": 25096, + "survi": 3440, + "surviv": 12922, + "survival": 10172, + "survive": 10431, + "survived": 13483, + "survives": 30927, + "surviving": 18609, + "survivor": 31934, + "survivor": 10944, + "survivors": 13711, + "surya": 37767, + "sus": 8091, + "sus": 3036, + "susa": 20546, + "susan": 19922, + "susan": 10168, + "suscep": 44270, + "sush": 22298, + "sushi": 11729, + "sushmaswar": 48200, + "susie": 32284, + "susp": 7971, + "suspec": 10298, + "suspect": 9065, + "suspected": 15579, + 
"suspects": 18265, + "suspen": 10578, + "suspend": 41007, + "suspended": 13126, + "suspends": 39535, + "suspense": 21556, + "suspension": 15417, + "suspici": 25714, + "suspicion": 34910, + "suspicious": 19862, + "sussex": 31244, + "sussex": 13266, + "sustain": 4644, + "sustain": 28156, + "sustainability": 9635, + "sustainable": 23645, + "sustainable": 7078, + "sustained": 22699, + "sustaining": 44418, + "sut": 23984, + "sut": 28956, + "sutherland": 27592, + "sutton": 39359, + "sutton": 18564, + "suv": 15985, + "suz": 9957, + "suzanne": 24617, + "suzu": 36289, + "suzuki": 16892, + "suzy": 26552, + "sv": 6508, + "sv": 17083, + "svc": 45065, + "sve": 47637, + "sven": 37786, + "sven": 45183, + "sver": 45923, + "sville": 44580, + "sville": 6741, + "svp": 28465, + "svt": 42014, + "svu": 32123, + "sw": 1220, + "sw": 4457, + "swa": 4707, + "swa": 31916, + "swach": 20862, + "swachhb": 31898, + "swachhbharat": 36927, + "swag": 8852, + "swag": 8177, + "swagg": 47702, + "swagger": 35797, + "swain": 43226, + "swal": 13433, + "swallow": 28979, + "swallowed": 46956, + "swallows": 45124, + "swam": 42539, + "swami": 25021, + "swamp": 41953, + "swamp": 16595, + "swamy": 28445, + "swan": 8215, + "swan": 12530, + "swana": 24699, + "swans": 19516, + "swansea": 16567, + "swanson": 34797, + "swap": 15234, + "swapped": 39077, + "swapping": 44702, + "swaps": 49242, + "swar": 11680, + "swarm": 31577, + "swarovski": 28515, + "swat": 32547, + "swat": 26482, + "swatch": 48053, + "sway": 26443, + "sway": 26617, + "swc": 42231, + "swe": 2350, + "swe": 38070, + "swear": 7406, + "swearing": 32627, + "sweat": 10282, + "sweat": 12663, + "sweater": 11455, + "sweaters": 31303, + "sweating": 33215, + "sweats": 39321, + "sweatshirt": 22442, + "sweaty": 28419, + "sweden": 8760, + "swedish": 11585, + "swee": 1812, + "sweek": 30017, + "sweeney": 27286, + "sweep": 23220, + "sweep": 13669, + "sweeping": 25719, + "sweeps": 26887, + "sweepstakes": 25992, + "sweet": 10957, + "sweet": 2418, + "sweetened": 45577, + "sweeter": 32873, + "sweetest": 15180, + "sweethe": 16316, + "sweetheart": 18079, + "sweetie": 24450, + "sweetness": 29713, + "sweets": 18045, + "swel": 48470, + "swell": 35538, + "swell": 21490, + "swelling": 46578, + "swept": 23311, + "swer": 30514, + "swfc": 30227, + "swfl": 46607, + "swi": 3881, + "swi": 45223, + "swick": 17159, + "swif": 28548, + "swift": 34843, + "swift": 8229, + "swild": 33909, + "swild": 38696, + "swildlife": 46818, + "swim": 4928, + "swim": 7681, + "swimmer": 25475, + "swimmers": 27776, + "swimming": 7411, + "swims": 46798, + "swimsuit": 25504, + "swimwear": 31889, + "swin": 14554, + "swin": 40798, + "swindon": 29540, + "swine": 31166, + "swing": 25292, + "swing": 7429, + "swinging": 26760, + "swings": 29141, + "swipe": 31828, + "swire": 42753, + "swirl": 35795, + "swis": 23611, + "swish": 38571, + "swiss": 37917, + "swiss": 9287, + "swit": 3726, + "switch": 22480, + "switch": 5893, + "switched": 22869, + "switches": 33569, + "switching": 21155, + "swith": 17299, + "switzer": 9835, + "switzerland": 9912, + "swivel": 48256, + "swo": 38673, + "swol": 29575, + "swollen": 36129, + "swoo": 29744, + "swood": 24158, + "swoon": 37028, + "swoop": 45661, + "sword": 33294, + "sword": 11356, + "swords": 27181, + "swork": 42722, + "sworld": 33305, + "sworn": 21130, + "sworth": 13322, + "swt": 38878, + "swx": 20597, + "sx": 9402, + "sx": 17806, + "sxsw": 13369, + "sy": 974, + "sy": 2126, + "sya": 35017, + "sycam": 34911, + "sycamore": 43086, + "syd": 4525, + "syd": 22504, + "sydney": 15878, + "sydney": 5278, + 
"syed": 27624, + "syfy": 32047, + "sykes": 27287, + "syl": 6452, + "sylla": 41708, + "sylvania": 12011, + "sylve": 28369, + "sylvester": 37214, + "sylvia": 25670, + "sym": 3645, + "sym": 40327, + "symb": 22987, + "symbol": 13085, + "symboli": 22019, + "symbolic": 33177, + "symbolism": 44679, + "symbols": 25476, + "symmetry": 31427, + "symp": 11468, + "sympathi": 47493, + "sympathy": 32477, + "symph": 9544, + "symphonic": 42639, + "symphony": 11180, + "sympo": 9730, + "symposium": 9971, + "symptom": 47799, + "symptoms": 12956, + "syn": 3758, + "syn": 36090, + "synago": 30945, + "synagogue": 33518, + "sync": 20081, + "synchron": 23943, + "syndic": 21098, + "syndicate": 28779, + "syndrome": 10927, + "syner": 22283, + "synergy": 32012, + "syno": 31533, + "synod": 47712, + "synopsis": 47018, + "synth": 33841, + "synth": 24462, + "synthe": 22604, + "synthesi": 33565, + "synthesis": 21602, + "synthesizer": 44077, + "synthetic": 19917, + "syou": 26742, + "syour": 21718, + "syrac": 17279, + "syracuse": 19640, + "syrah": 45364, + "syri": 18917, + "syria": 5563, + "syrian": 47562, + "syrian": 10041, + "syrians": 41392, + "syrup": 16611, + "sys": 26726, + "syste": 1933, + "system": 47813, + "system": 2422, + "systematic": 28586, + "systemic": 33807, + "systems": 4828, + "sz": 13438, + "sz": 15879, + "sze": 44507, + "szn": 48092, + "são": 45911, + "sé": 37879, + "t": 83, + "t": 339, + "ta": 648, + "ta": 1397, + "taa": 43874, + "tab": 2648, + "tab": 14724, + "tabby": 36145, + "tabern": 48991, + "tability": 15770, + "table": 12108, + "table": 2175, + "tableau": 39723, + "tables": 7822, + "tablet": 12494, + "tabletop": 46843, + "tabletop": 25773, + "tablets": 20436, + "tably": 24440, + "taboo": 38400, + "tabs": 29163, + "tac": 3145, + "tac": 22653, + "tache": 39239, + "tack": 6339, + "tack": 34446, + "tackle": 10294, + "tackled": 47218, + "tackles": 18021, + "tackling": 19628, + "taco": 31924, + "taco": 12436, + "tacoma": 25397, + "tacos": 14090, + "tactic": 40377, + "tactical": 17137, + "tactics": 16410, + "tacular": 48985, + "tad": 15890, + "tad": 19860, + "tado": 40846, + "tae": 15257, + "tae": 15580, + "taehyung": 24642, + "taek": 30753, + "taekwondo": 39963, + "taemin": 30600, + "taeyang": 45802, + "taeyeon": 27389, + "taf": 29660, + "taft": 42141, + "tag": 3456, + "tag": 3640, + "tage": 2669, + "tages": 39902, + "tagged": 12969, + "tagging": 25138, + "tagne": 47467, + "tags": 11606, + "tah": 14822, + "tah": 7090, + "tahit": 45385, + "tahoe": 26140, + "tai": 6511, + "tai": 13040, + "taiji": 30185, + "tail": 7156, + "tail": 4132, + "tailed": 20626, + "tailgate": 23168, + "tailgating": 42625, + "tailo": 27230, + "tailor": 29870, + "tailored": 28275, + "tailoring": 46357, + "tails": 16066, + "tain": 2841, + "tain": 1908, + "taine": 21214, + "taine": 32299, + "tained": 10212, + "taining": 7565, + "tainment": 30063, + "tains": 3952, + "tainted": 47211, + "taipei": 24356, + "tair": 29143, + "tairp": 43707, + "tait": 45325, + "taiwan": 36319, + "taiwan": 12626, + "taiwanese": 41416, + "taj": 28937, + "taj": 24805, + "taji": 46358, + "tak": 15070, + "tak": 14458, + "taka": 24070, + "taka": 40968, + "take": 5052, + "take": 1172, + "takeaway": 25737, + "takeaways": 32080, + "takeme": 41748, + "taken": 2807, + "takeoff": 32789, + "takeover": 11863, + "taker": 17939, + "takers": 30775, + "takes": 2633, + "takin": 30890, + "taking": 2019, + "taku": 48168, + "tal": 976, + "tal": 2066, + "tala": 29845, + "talaga": 35349, + "talbot": 30585, + "tale": 33971, + "tale": 7798, + "talent": 30435, + "talent": 5114, + 
"talented": 5331, + "talents": 16136, + "tales": 9469, + "tali": 12122, + "tali": 45406, + "taliban": 20788, + "talis": 36480, + "tality": 15631, + "talk": 12462, + "talk": 1841, + "talked": 10153, + "talkin": 26040, + "talking": 31463, + "talking": 2578, + "talks": 3237, + "tall": 11664, + "tall": 7771, + "talla": 21528, + "tallade": 44220, + "tallahassee": 37832, + "taller": 23470, + "tallest": 19774, + "tallinn": 45079, + "tally": 16323, + "talon": 47897, + "tam": 2661, + "tam": 12246, + "tama": 45424, + "tamanna": 48055, + "tamar": 22901, + "tamara": 35697, + "tame": 38557, + "tame": 32778, + "tamed": 40575, + "tami": 39429, + "tamil": 23046, + "tamil": 14033, + "tamilnadu": 32371, + "tamine": 42566, + "tammy": 28396, + "tampa": 10906, + "tampab": 37852, + "tamu": 34105, + "tan": 2123, + "tan": 5039, + "tana": 21396, + "tand": 20244, + "tandem": 33756, + "tane": 13344, + "tane": 24923, + "taneous": 22275, + "taneously": 24422, + "tang": 10425, + "tang": 20794, + "tanger": 31844, + "tangerine": 42045, + "tangible": 44823, + "tangle": 36568, + "tangled": 33587, + "tango": 24089, + "tani": 31374, + "tani": 32985, + "tania": 45369, + "tank": 29858, + "tank": 6172, + "tanker": 25020, + "tanks": 14223, + "tann": 19174, + "tanner": 22001, + "tanning": 27985, + "tans": 27332, + "tant": 41383, + "tant": 41695, + "tante": 48262, + "tanto": 45685, + "tany": 34410, + "tanya": 26800, + "tanz": 47399, + "tanzania": 15711, + "tao": 29084, + "tao": 18923, + "tap": 17923, + "tap": 7888, + "tapas": 27361, + "tape": 18332, + "tape": 5749, + "taped": 33219, + "tapes": 17903, + "tapestry": 33525, + "taping": 24355, + "tapp": 27644, + "tapp": 27764, + "tapped": 26649, + "tapping": 27882, + "tapro": 34415, + "taproom": 40266, + "taps": 23267, + "tar": 2002, + "tar": 6977, + "tara": 15264, + "tarak": 37813, + "taran": 32370, + "tarantino": 41180, + "tarde": 48670, + "tardis": 35410, + "tares": 34587, + "targe": 9620, + "target": 38556, + "target": 5400, + "targeted": 14968, + "targeting": 15818, + "targets": 12468, + "tari": 4238, + "tari": 38012, + "tarian": 11762, + "tarians": 42789, + "taries": 47291, + "tariff": 40220, + "tariffs": 28335, + "tariq": 42526, + "tarmac": 44294, + "taro": 26264, + "tarot": 23702, + "tart": 16707, + "tart": 14120, + "tartan": 35064, + "tarts": 29799, + "tary": 31729, + "tary": 5065, + "tarzan": 45463, + "tas": 6538, + "tas": 10163, + "tash": 35272, + "tasha": 44967, + "task": 39189, + "task": 10549, + "tasks": 19453, + "tasmania": 22429, + "tasmanian": 45102, + "tassel": 49276, + "tast": 10839, + "taste": 14314, + "taste": 5219, + "tasted": 22827, + "tasteof": 38097, + "taster": 29743, + "tastes": 13736, + "tastic": 21337, + "tasting": 7656, + "tastings": 49273, + "tasty": 43390, + "tasty": 8568, + "tat": 2652, + "tat": 21592, + "tata": 19300, + "tate": 44476, + "tate": 13295, + "tath": 27566, + "tati": 31433, + "tatiana": 48837, + "tation": 5280, + "tations": 32324, + "tator": 18791, + "tators": 37206, + "tats": 44557, + "tatt": 9232, + "tatted": 41605, + "tattoo": 15980, + "tattoo": 6325, + "tattooed": 28541, + "tattoos": 14900, + "tatum": 26103, + "tau": 6620, + "tau": 20510, + "taught": 9306, + "taun": 23910, + "taunton": 40681, + "taurus": 32881, + "taver": 37776, + "tavern": 18644, + "taw": 33868, + "taw": 40289, + "tawa": 29035, + "tawards": 14351, + "tax": 4581, + "tax": 3879, + "taxation": 36847, + "taxes": 11462, + "taxi": 25160, + "taxi": 11380, + "taxider": 47420, + "taxis": 34009, + "taxpay": 17986, + "taxpayer": 30978, + "taxpayers": 25503, + "tay": 6542, + 
"tay": 15073, + "taya": 38484, + "tayl": 3913, + "taylor": 9044, + "taylor": 3961, + "taylorswift": 18936, + "tayo": 33941, + "taz": 41475, + "taz": 31870, + "tb": 1990, + "tb": 7490, + "tba": 34363, + "tball": 8390, + "tball": 1467, + "tbc": 31807, + "tbd": 45548, + "tbh": 13238, + "tbi": 45868, + "tbl": 42962, + "tbli": 43664, + "tblightning": 44178, + "tbo": 34255, + "tbr": 46643, + "tbs": 37368, + "tbt": 2950, + "tc": 6820, + "tc": 5454, + "tca": 35116, + "tch": 10744, + "tch": 4048, + "tches": 42001, + "tcm": 21501, + "tcm": 26588, + "tcmparty": 24338, + "tcot": 8995, + "tcs": 39107, + "tcu": 26791, + "td": 20578, + "td": 3192, + "tdf": 21844, + "tdi": 45621, + "tdp": 47009, + "tds": 20238, + "tdsb": 29836, + "te": 600, + "te": 756, + "tea": 41053, + "tea": 3274, + "teach": 2043, + "teach": 6865, + "teacher": 18051, + "teacher": 4008, + "teachers": 5069, + "teaches": 17110, + "teaching": 5141, + "teachings": 32119, + "teal": 22821, + "team": 2085, + "team": 1027, + "teamcanada": 46636, + "teamed": 20590, + "teamgb": 40971, + "teaming": 24392, + "teammate": 17900, + "teammates": 13921, + "teams": 3891, + "teamsisd": 34703, + "teamusa": 28625, + "teamwork": 14657, + "teaparty": 33065, + "teapo": 35745, + "teapot": 40749, + "tear": 15802, + "tear": 11862, + "tearful": 46873, + "tearing": 24785, + "tears": 7688, + "teas": 23003, + "teas": 29314, + "tease": 25163, + "teased": 49122, + "teaser": 8982, + "teasers": 48990, + "teases": 28509, + "teasing": 36507, + "teat": 26376, + "teatime": 48948, + "teatro": 35756, + "teau": 24931, + "tebow": 37797, + "tec": 17381, + "tec": 11612, + "tech": 1782, + "tech": 2061, + "techcrunch": 42110, + "techn": 6252, + "technews": 31787, + "technic": 16639, + "technic": 37666, + "technical": 49231, + "technical": 7582, + "technically": 23180, + "technician": 22540, + "technicians": 35513, + "techno": 2599, + "techno": 17564, + "technological": 23068, + "technologies": 10040, + "technology": 3089, + "techs": 41353, + "ted": 4841, + "ted": 775, + "tedcruz": 27517, + "teddy": 25758, + "teddy": 11798, + "tedly": 8539, + "tedu": 42517, + "tedx": 17950, + "tedx": 41504, + "tee": 12676, + "tee": 3385, + "teed": 13692, + "teen": 5398, + "teen": 4697, + "teenage": 14069, + "teenager": 19338, + "teenagers": 25989, + "teenchoice": 28203, + "teens": 12375, + "teenth": 20249, + "teenwolf": 40067, + "teeny": 41622, + "teer": 48648, + "tees": 9641, + "teessi": 43295, + "teeth": 8225, + "tega": 29508, + "tegr": 39801, + "teh": 18720, + "teh": 29601, + "tehran": 26399, + "tein": 33223, + "tej": 46724, + "tek": 17489, + "tek": 18294, + "tekken": 29843, + "tel": 4978, + "tel": 2226, + "telang": 23469, + "telangana": 26386, + "tele": 3103, + "tele": 32851, + "telecom": 21057, + "telecommunications": 39900, + "telegram": 26780, + "telegraph": 14713, + "telephone": 17243, + "telescope": 19037, + "telethon": 49266, + "televised": 39470, + "television": 8608, + "telford": 38323, + "tell": 16069, + "tell": 2330, + "teller": 20415, + "tellers": 42707, + "telling": 5507, + "tells": 5217, + "tellu": 42511, + "telly": 31475, + "tels": 43607, + "telugu": 22927, + "tely": 5630, + "tem": 2404, + "tem": 17536, + "tema": 45881, + "teme": 43378, + "temp": 2684, + "temp": 11097, + "tempe": 36723, + "temper": 5981, + "temper": 35521, + "temperature": 9543, + "temperatures": 11575, + "tempered": 40521, + "tempest": 36053, + "templ": 16679, + "template": 18591, + "templates": 30498, + "temple": 21841, + "temple": 5620, + "temples": 24024, + "tempo": 19625, + "tempor": 4858, + "temporal": 43656, 
+ "temporarily": 23189, + "temporary": 6513, + "temps": 11668, + "tempt": 28460, + "temptation": 30118, + "tempted": 26226, + "tempting": 34876, + "ten": 1149, + "ten": 2581, + "tenant": 16954, + "tenants": 26023, + "tenay": 45384, + "tenberg": 31329, + "tend": 17630, + "tend": 21252, + "tendency": 47277, + "tender": 23020, + "tender": 9838, + "tenderloin": 42750, + "tenders": 44741, + "tending": 35084, + "tendon": 48459, + "tends": 39962, + "tene": 24868, + "tened": 13682, + "tener": 29054, + "teneri": 28000, + "tenerife": 29401, + "teners": 41307, + "teness": 18018, + "teng": 34016, + "teng": 28474, + "tennant": 29310, + "tennes": 9514, + "tennessee": 10053, + "tennis": 31504, + "tennis": 5298, + "tenor": 30521, + "tens": 14062, + "tense": 23518, + "tension": 15221, + "tensions": 24224, + "tenstein": 49139, + "tent": 18505, + "tent": 10782, + "tentative": 48238, + "tenth": 27483, + "tention": 12191, + "tents": 30730, + "tenure": 30739, + "teo": 18665, + "tep": 31806, + "tequ": 17502, + "tequila": 18510, + "ter": 704, + "ter": 652, + "tera": 15155, + "teras": 44830, + "tere": 11329, + "tered": 49272, + "tered": 4389, + "terence": 33806, + "teresa": 19081, + "teri": 30917, + "teria": 22685, + "terie": 42276, + "tering": 7929, + "term": 40991, + "term": 4780, + "termin": 4766, + "terminal": 11816, + "terminals": 44091, + "terminator": 29609, + "terminology": 48896, + "terms": 8663, + "tern": 41572, + "tern": 12959, + "terns": 25251, + "tero": 20727, + "tero": 24697, + "terps": 41471, + "terr": 3921, + "terra": 22366, + "terra": 18816, + "terrac": 28549, + "terrace": 13820, + "terraces": 47508, + "terracotta": 45123, + "terrain": 20184, + "terran": 43726, + "terre": 33888, + "terre": 27537, + "terrell": 39494, + "terrence": 38746, + "terrestrial": 46299, + "terri": 4504, + "terri": 36722, + "terrible": 9741, + "terribly": 34558, + "terrier": 14455, + "terriers": 47047, + "terrific": 13837, + "terrified": 28204, + "terrifying": 18526, + "territ": 10720, + "territorial": 39163, + "territories": 32846, + "territory": 13936, + "terror": 9596, + "terror": 9327, + "terrori": 6836, + "terrorism": 10583, + "terrorist": 10575, + "terrorists": 12835, + "terry": 19378, + "terry": 8561, + "ters": 24102, + "ters": 1737, + "terti": 48386, + "tery": 4184, + "tes": 8019, + "tes": 3609, + "tesco": 15434, + "tese": 33320, + "tesla": 12254, + "tess": 21807, + "tess": 20840, + "tessa": 32063, + "test": 7738, + "test": 1628, + "testam": 23477, + "testament": 24609, + "tested": 10576, + "tester": 32707, + "testi": 18373, + "testic": 42364, + "testify": 33088, + "testifying": 46347, + "testim": 12553, + "testimonial": 28834, + "testimony": 18672, + "testing": 4967, + "testo": 42428, + "testosterone": 45168, + "tests": 8715, + "tet": 40468, + "tet": 13275, + "tetra": 40902, + "tetris": 45934, + "teu": 47152, + "teuk": 39979, + "teur": 27120, + "tex": 2056, + "tex": 11728, + "texan": 35287, + "texan": 38386, + "texans": 17580, + "texanscheer": 43717, + "texas": 15713, + "texas": 3403, + "texaste": 46469, + "text": 18169, + "text": 4160, + "textbook": 25952, + "textbooks": 44041, + "texted": 29004, + "textile": 19789, + "textiles": 24326, + "texting": 18600, + "texts": 12767, + "texture": 16505, + "textured": 32168, + "textures": 28063, + "tey": 32395, + "tez": 22664, + "tf": 18828, + "tf": 5001, + "tfc": 30186, + "tfl": 29918, + "tford": 22493, + "tful": 17108, + "tfw": 16741, + "tg": 7665, + "tg": 11981, + "tgif": 14483, + "th": 513, + "th": 640, + "tha": 18470, + "tha": 4715, + "thab": 38219, + "thad": 48339, + 
"thai": 28054, + "thai": 8825, + "thail": 7258, + "thailand": 7469, + "thak": 22801, + "thakur": 38427, + "thal": 7967, + "thal": 12323, + "thala": 17784, + "thalai": 25206, + "thalaivar": 44918, + "thalap": 39789, + "thalapathy": 45405, + "thalapathy": 23324, + "thall": 36007, + "tham": 11761, + "tham": 8896, + "thames": 43472, + "thames": 15321, + "than": 792, + "than": 1126, + "thand": 44465, + "thane": 21463, + "thang": 24870, + "thani": 31322, + "thank": 2790, + "thank": 1144, + "thanked": 32079, + "thankful": 38839, + "thankful": 6217, + "thankfully": 22089, + "thanking": 21989, + "thanks": 5672, + "thanks": 1085, + "thanksgiving": 45732, + "thanksgiving": 6167, + "thanku": 45710, + "thankyou": 18050, + "thankyou": 9911, + "thanniversary": 35564, + "thanos": 36709, + "thanx": 25095, + "thar": 14396, + "thar": 38843, + "thard": 43474, + "that": 6303, + "that": 682, + "thatcher": 32496, + "thats": 44636, + "thats": 9254, + "thaw": 26081, + "thaw": 47229, + "thbewithyou": 41067, + "thc": 20091, + "thcentury": 49111, + "thd": 28219, + "thday": 37801, + "the": 599, + "the": 518, + "thea": 15935, + "thea": 25429, + "thead": 25259, + "theal": 45728, + "thealth": 31398, + "thear": 43283, + "theart": 44678, + "theast": 8378, + "theastern": 17877, + "theat": 2263, + "theater": 39438, + "theater": 6128, + "theaters": 14689, + "theatre": 19857, + "theatre": 3292, + "theatres": 21680, + "theatrical": 26833, + "theband": 27695, + "thebeatles": 35645, + "thebest": 40883, + "thebest": 25856, + "thebig": 24732, + "theblack": 47718, + "thec": 48659, + "thed": 31405, + "thedaily": 33550, + "theday": 4408, + "thedream": 39417, + "thee": 44475, + "thee": 15108, + "theeconomist": 44518, + "theellenshow": 35342, + "thefilm": 31665, + "theflash": 25434, + "theforce": 40002, + "theforceawakens": 48033, + "theft": 13286, + "thefuture": 34287, + "thegame": 24428, + "thegood": 28594, + "thegreat": 28721, + "thei": 44522, + "their": 911, + "theirs": 29297, + "thel": 5403, + "thelast": 23495, + "thelastjedi": 47992, + "theless": 27712, + "theli": 15277, + "thelittle": 46872, + "thelo": 47036, + "thelove": 40668, + "thelove": 43200, + "them": 5435, + "them": 1180, + "themasters": 48378, + "theme": 38524, + "theme": 5849, + "themed": 10126, + "themes": 17849, + "themet": 48183, + "themovie": 27062, + "themselves": 6503, + "then": 5929, + "then": 1594, + "thenburg": 45209, + "thene": 17012, + "thenew": 24212, + "thenext": 47881, + "thenight": 43336, + "theno": 37172, + "thenorth": 34338, + "theo": 17043, + "theo": 18084, + "theod": 26653, + "theodore": 30743, + "theological": 41162, + "theology": 24095, + "theon": 34653, + "theone": 46231, + "theopen": 41438, + "theore": 22690, + "theoretical": 35585, + "theori": 34804, + "theories": 23937, + "theory": 7143, + "thepeople": 33597, + "thepersonal": 29981, + "thepersonalnetwork": 30016, + "thephoto": 18303, + "thephotohour": 18607, + "ther": 1160, + "ther": 743, + "therap": 4499, + "therapeu": 19332, + "therapeutic": 23240, + "therapeutics": 49101, + "therapies": 30179, + "therapist": 20608, + "therapists": 34763, + "therapper": 49340, + "therapy": 5257, + "there": 5283, + "there": 997, + "thereal": 8074, + "thereal": 41140, + "thereby": 43308, + "thered": 10208, + "therefore": 16865, + "theres": 18494, + "theresa": 14126, + "therese": 47996, + "theresistance": 22845, + "theri": 28967, + "theri": 45297, + "therine": 26807, + "therine": 9239, + "thering": 7891, + "therland": 25351, + "thermal": 13689, + "thermo": 22303, + "thermom": 31138, + "thermometer": 38172, + 
"thermost": 42391, + "thern": 10919, + "thern": 3137, + "thero": 13165, + "theroad": 29807, + "therock": 30036, + "theroy": 38146, + "thers": 1959, + "thes": 40556, + "thes": 6460, + "thescript": 47061, + "these": 40366, + "these": 1071, + "theses": 39388, + "thesimpsons": 45513, + "thesims": 34192, + "thesis": 10673, + "thessal": 41491, + "thessaloni": 41753, + "thest": 35343, + "thesun": 45617, + "theta": 27694, + "thetic": 7954, + "thetimes": 36039, + "thevamp": 33701, + "thevoice": 47206, + "thevoice": 30258, + "thewalkingdead": 18087, + "thewanted": 43008, + "theworld": 44988, + "theworld": 17475, + "thex": 35990, + "they": 15174, + "they": 889, + "theyre": 28266, + "thfc": 17729, + "thi": 2362, + "thi": 9111, + "thia": 17943, + "thiago": 44537, + "thian": 23214, + "thians": 28187, + "thibau": 48351, + "thic": 26107, + "thic": 11794, + "thick": 18417, + "thick": 11006, + "thicker": 43302, + "thickness": 40754, + "thief": 18508, + "thier": 25595, + "thierry": 32929, + "thieves": 17899, + "thigh": 47124, + "thigh": 22877, + "thighs": 30847, + "thik": 20512, + "thika": 44619, + "thill": 31266, + "thim": 42331, + "thin": 2178, + "thin": 7847, + "thine": 47192, + "thing": 7499, + "thing": 946, + "things": 30670, + "things": 1739, + "thingsto": 43924, + "thingy": 36888, + "think": 9820, + "think": 1331, + "thinkbig": 26015, + "thinkbigsundaywithmarsha": 26666, + "thinker": 34577, + "thinkers": 32779, + "thinkin": 34443, + "thinking": 3291, + "thinks": 6109, + "thinner": 47247, + "thir": 6030, + "third": 32102, + "third": 3981, + "thirds": 42582, + "thirst": 23563, + "thirsty": 39731, + "thirsty": 17521, + "thirteen": 34209, + "thirty": 20813, + "thiru": 43292, + "this": 4340, + "this": 589, + "thisday": 6532, + "thisdayin": 33641, + "thisdayinhistory": 46913, + "thisi": 7299, + "thisis": 14887, + "thismorning": 36245, + "thistle": 29039, + "thistory": 28904, + "thium": 21804, + "thletics": 17765, + "thm": 10407, + "thman": 30079, + "thms": 19874, + "thn": 44155, + "thn": 45587, + "thnx": 25480, + "tho": 1325, + "tho": 5025, + "thof": 18943, + "thofjuly": 21613, + "thol": 29319, + "thole": 31029, + "tholes": 42465, + "thology": 9881, + "thom": 2585, + "thom": 24094, + "thomas": 12574, + "thomas": 3888, + "thome": 21289, + "thomp": 37274, + "thompson": 42181, + "thompson": 8535, + "thomson": 24151, + "thon": 38776, + "thon": 8924, + "thong": 37058, + "thood": 15623, + "thor": 4130, + "thor": 13691, + "thora": 46866, + "thorn": 12957, + "thorn": 18466, + "thorne": 18025, + "thorns": 33650, + "thornton": 23592, + "thorough": 15294, + "thorough": 34788, + "thoroughbred": 43248, + "thoroughly": 19750, + "thorpe": 18099, + "thos": 41965, + "those": 1753, + "thot": 33736, + "thou": 1513, + "thou": 17781, + "though": 2846, + "thought": 23948, + "thought": 2449, + "thoughtful": 19592, + "thoughts": 3618, + "thour": 27125, + "thousand": 9344, + "thousands": 7089, + "thouse": 40318, + "thouse": 7819, + "thoven": 23078, + "thr": 1111, + "thr": 19138, + "thra": 17761, + "thra": 32797, + "thrash": 38262, + "thre": 1607, + "thread": 31108, + "thread": 8815, + "threads": 24957, + "threat": 7527, + "threat": 7212, + "threaten": 26097, + "threatened": 16391, + "threatening": 16400, + "threatens": 20555, + "threats": 12766, + "three": 21615, + "three": 2097, + "thren": 41776, + "thresh": 29779, + "threshold": 33791, + "threw": 12746, + "thri": 8713, + "thrift": 27779, + "thrill": 21023, + "thrilled": 7879, + "thriller": 9653, + "thrilling": 20101, + "thrills": 39829, + "thrive": 17669, + "thriving": 22677, + 
"thro": 2101, + "thro": 28624, + "throat": 16371, + "thrombo": 47585, + "throne": 15999, + "thrones": 8072, + "throp": 34939, + "throttle": 37139, + "through": 6091, + "through": 1417, + "throughout": 6721, + "throughs": 48278, + "throw": 3315, + "throw": 6293, + "throwback": 6001, + "throwback": 5058, + "throwbackthursday": 6326, + "thrower": 40199, + "throwing": 9734, + "thrown": 15079, + "throws": 14723, + "thru": 23856, + "thru": 6162, + "thrush": 46133, + "thrust": 40202, + "ths": 2079, + "tht": 23554, + "thu": 3837, + "thu": 14153, + "thub": 25660, + "thug": 37212, + "thug": 18137, + "thugs": 27686, + "thul": 28368, + "thulhu": 37560, + "thum": 14679, + "thumb": 19514, + "thumb": 18674, + "thumbnail": 32365, + "thumbs": 17599, + "thun": 32267, + "thunder": 6161, + "thunder": 8951, + "thunderbird": 45131, + "thunderbirds": 44286, + "thunderbolt": 43596, + "thunderstorm": 12005, + "thunderstorms": 19525, + "thunt": 46763, + "thur": 1837, + "thur": 21704, + "thurman": 41291, + "thurs": 9908, + "thursday": 11218, + "thursday": 2221, + "thursdaymotivation": 39375, + "thursdays": 21444, + "thursdaythoughts": 14866, + "thurst": 33970, + "thus": 12457, + "thusi": 9488, + "thwaite": 48469, + "thweeksary": 30871, + "thx": 5913, + "thy": 7804, + "thy": 3362, + "thyme": 29805, + "thyro": 25174, + "thyroid": 32558, + "ti": 555, + "ti": 2605, + "tia": 6709, + "tial": 2826, + "tially": 14503, + "tian": 23011, + "tian": 8125, + "tians": 35182, + "tiara": 38322, + "tib": 47868, + "tibet": 19927, + "tibet": 22234, + "tibetan": 24057, + "tible": 11453, + "tic": 890, + "tic": 1550, + "tica": 9669, + "tical": 34191, + "tical": 4342, + "tically": 13375, + "ticals": 30861, + "tice": 3122, + "tich": 48769, + "tician": 43358, + "ticism": 26491, + "tick": 24640, + "tick": 15617, + "ticket": 25740, + "ticket": 4500, + "ticketing": 44432, + "tickets": 2015, + "ticking": 35842, + "tickle": 42999, + "ticks": 40269, + "tico": 17670, + "ticon": 45996, + "tics": 2419, + "ticul": 15538, + "ticus": 44277, + "tid": 26002, + "tid": 23727, + "tidal": 21949, + "tide": 15698, + "tide": 9105, + "tides": 25524, + "tidy": 23858, + "tie": 14072, + "tie": 3422, + "tied": 9889, + "tiem": 34762, + "tien": 47538, + "tiene": 43438, + "tier": 14390, + "tier": 6598, + "tierney": 45693, + "tiers": 24604, + "ties": 25556, + "ties": 2499, + "tiest": 18300, + "tiesto": 46367, + "tif": 23216, + "tiff": 11112, + "tiff": 20699, + "tiffany": 30467, + "tiffany": 14446, + "tification": 43923, + "tified": 40854, + "tiful": 29123, + "tify": 6677, + "tig": 31999, + "tiger": 11954, + "tiger": 6531, + "tigers": 6934, + "tigh": 31365, + "tight": 25763, + "tight": 9123, + "tighten": 46653, + "tighter": 48193, + "tightly": 37568, + "tights": 29581, + "tijuana": 45273, + "tik": 24986, + "tik": 32403, + "tiki": 30107, + "til": 6124, + "til": 1763, + "tile": 26217, + "tile": 8227, + "tiles": 10607, + "tility": 38180, + "till": 17462, + "till": 4267, + "tilla": 26063, + "tillerson": 47738, + "tilly": 41199, + "tilt": 23601, + "tim": 1292, + "tim": 3863, + "timate": 4754, + "timb": 26627, + "timber": 14441, + "timber": 16246, + "timberlake": 28274, + "timbers": 39911, + "timberwolves": 41190, + "time": 3764, + "time": 788, + "timed": 32727, + "timehop": 19944, + "timel": 23549, + "timelapse": 48154, + "timeless": 15558, + "timeline": 11492, + "timely": 19250, + "timeout": 41536, + "timer": 19725, + "timers": 44574, + "times": 26445, + "times": 1661, + "timesnow": 45487, + "timesof": 32522, + "timesofindia": 44182, + "timetable": 31971, + "timeto": 29187, + 
"timing": 13624, + "timm": 22444, + "timmy": 33252, + "timo": 13390, + "timo": 33777, + "timothy": 42087, + "timothy": 18560, + "timp": 42166, + "tin": 1310, + "tin": 5420, + "tina": 9257, + "tinder": 24287, + "tine": 22341, + "ting": 7451, + "ting": 694, + "tinged": 44829, + "tings": 35332, + "tini": 26839, + "tink": 39278, + "tinker": 45272, + "tinker": 40910, + "tino": 20538, + "tins": 37359, + "tint": 40497, + "tinted": 42618, + "tiny": 21716, + "tiny": 5591, + "tio": 27562, + "tion": 2274, + "tion": 740, + "tional": 22460, + "tional": 2986, + "tionality": 24514, + "tionally": 12409, + "tionary": 8381, + "tione": 44318, + "tioned": 9083, + "tioning": 15528, + "tionist": 25732, + "tions": 1371, + "tious": 14255, + "tip": 15383, + "tip": 4623, + "tipoff": 44521, + "tipp": 32294, + "tipped": 31878, + "tipper": 38095, + "tipperary": 45612, + "tipping": 27827, + "tips": 3173, + "tipton": 48809, + "tiptuesday": 42112, + "tique": 37772, + "tir": 25467, + "tir": 38462, + "tire": 29128, + "tire": 9362, + "tired": 6533, + "tireless": 39835, + "tirelessly": 41548, + "tires": 15533, + "tiring": 42630, + "tiru": 36033, + "tis": 7839, + "tis": 7394, + "tise": 13745, + "tisgarh": 40538, + "tish": 45148, + "tish": 28784, + "tism": 27113, + "tiss": 28155, + "tissue": 15368, + "tissues": 32172, + "tist": 7902, + "tista": 25580, + "tists": 25944, + "tit": 1991, + "tit": 13202, + "tita": 40936, + "titan": 13496, + "titan": 15516, + "titanic": 20729, + "titanium": 24409, + "titans": 13066, + "titi": 17434, + "titi": 48504, + "title": 28033, + "title": 3644, + "titled": 9939, + "titles": 9780, + "tito": 26838, + "titus": 36102, + "tium": 21975, + "tiv": 1835, + "tiva": 41886, + "tive": 14640, + "tive": 1420, + "tively": 9883, + "tiveness": 20955, + "tives": 7570, + "tivity": 9859, + "tivo": 32162, + "tix": 5835, + "tiz": 19376, + "tj": 18890, + "tj": 18988, + "tk": 22344, + "tk": 20676, + "tko": 37347, + "tks": 38739, + "tl": 14325, + "tl": 8190, + "tland": 30697, + "tlap": 41976, + "tlc": 22047, + "tle": 39141, + "tle": 5825, + "tles": 39363, + "tless": 17427, + "tlot": 41080, + "tls": 47367, + "tly": 37483, + "tly": 1646, + "tm": 9430, + "tm": 7789, + "tman": 20796, + "tmc": 35263, + "tment": 26485, + "tml": 39445, + "tmltalk": 42260, + "tmnt": 32444, + "tmobile": 34901, + "tmr": 35906, + "tmrw": 16496, + "tms": 44496, + "tmund": 23801, + "tmw": 45827, + "tmz": 37248, + "tn": 3827, + "tn": 7248, + "tna": 21150, + "tnam": 8079, + "tner": 34922, + "tness": 35212, + "tney": 9523, + "tng": 35898, + "tnt": 20659, + "tnx": 38220, + "to": 580, + "to": 531, + "toa": 17916, + "toad": 26096, + "toast": 24654, + "toast": 10920, + "toasted": 23533, + "toaster": 39061, + "toasty": 44726, + "tob": 24260, + "tobac": 12611, + "tobacco": 13905, + "tobago": 39482, + "tobe": 17534, + "tobe": 28740, + "tober": 18162, + "tober": 2925, + "toberfest": 26249, + "tobi": 40335, + "tobi": 48374, + "tobias": 32464, + "tobin": 42466, + "toby": 29659, + "toby": 18333, + "toc": 41907, + "toc": 30643, + "tock": 25274, + "tod": 38239, + "tod": 33568, + "toda": 47141, + "todas": 36150, + "today": 11800, + "today": 721, + "todayin": 32957, + "todays": 13513, + "todayshow": 29739, + "todd": 10398, + "todd": 9951, + "toddler": 17772, + "toddlers": 36719, + "toddy": 38926, + "todo": 48857, + "todo": 23087, + "todos": 33355, + "toe": 47756, + "toe": 11344, + "toes": 16511, + "tof": 6659, + "toff": 27319, + "toffee": 34880, + "tofficial": 47953, + "tofthe": 23678, + "toftheday": 20566, + "tofu": 24692, + "tog": 45715, + "toge": 1903, + 
"together": 17858, + "together": 1952, + "togo": 26729, + "tography": 33968, + "toh": 26851, + "toi": 7472, + "toi": 26941, + "toid": 49124, + "toile": 43148, + "toilet": 11071, + "toilets": 24027, + "toire": 39534, + "tok": 16690, + "tok": 27010, + "token": 32634, + "token": 17134, + "tokens": 23562, + "tokyo": 35038, + "tokyo": 6667, + "tol": 4678, + "tol": 32962, + "told": 3527, + "tole": 15677, + "toledo": 19812, + "toler": 12150, + "tolerance": 20377, + "tolerant": 38536, + "tolerate": 35556, + "tolkien": 32989, + "toll": 44090, + "toll": 14155, + "tollywood": 42016, + "tology": 34799, + "tom": 999, + "tom": 2435, + "toma": 42360, + "toma": 44710, + "tomas": 35944, + "tomas": 27178, + "tomat": 12041, + "tomato": 9867, + "tomatoes": 13004, + "tomb": 37187, + "tomb": 15582, + "tombs": 48613, + "tombstone": 45729, + "tome": 24137, + "tome": 24283, + "tomi": 46290, + "tomlin": 46649, + "tomlinson": 17484, + "tommorow": 42871, + "tommy": 16573, + "tommy": 8876, + "tomo": 31223, + "tomo": 34434, + "tomor": 1277, + "tomorrow": 19728, + "tomorrow": 1293, + "tomorrowland": 34951, + "tomorrows": 32258, + "tomorrowspaper": 35005, + "tomorrowspaperstoday": 35190, + "tomp": 43544, + "tompkins": 49068, + "toms": 10545, + "tomy": 18730, + "ton": 838, + "ton": 917, + "tona": 13459, + "tone": 32366, + "tone": 8408, + "toned": 29426, + "toner": 40614, + "tones": 14744, + "tong": 21510, + "tonga": 37882, + "tongue": 44820, + "tongue": 13626, + "tongues": 39837, + "toni": 17766, + "toni": 17171, + "tonic": 17808, + "tonics": 34647, + "tonight": 1009, + "tonights": 23312, + "tonite": 13449, + "tonka": 42781, + "tonline": 45867, + "tonne": 42450, + "tonnes": 24813, + "tons": 7555, + "tony": 9150, + "tony": 4767, + "tonyawards": 46068, + "too": 1843, + "too": 1256, + "took": 2280, + "tool": 13718, + "tool": 5999, + "toolbox": 46599, + "toolkit": 29849, + "tools": 5771, + "toom": 27550, + "toon": 24664, + "toon": 19701, + "toonami": 48336, + "toons": 35345, + "toor": 42590, + "tooth": 15316, + "tooth": 12030, + "toothbrush": 36841, + "toothpaste": 37322, + "tooting": 42969, + "top": 5534, + "top": 1253, + "topaz": 46125, + "tope": 32149, + "tope": 42239, + "topeka": 46884, + "topia": 29618, + "topic": 8720, + "topical": 37464, + "topics": 11916, + "topless": 37415, + "topo": 23008, + "topoli": 30152, + "topp": 19529, + "topped": 12588, + "topper": 31780, + "toppers": 41651, + "topping": 21071, + "toppings": 47554, + "topps": 20201, + "tops": 8154, + "topshop": 40953, + "topus": 21495, + "tor": 937, + "tor": 1208, + "tora": 45147, + "torah": 37945, + "toral": 45282, + "torch": 31921, + "torch": 15820, + "tore": 38066, + "tore": 19385, + "tored": 38046, + "torg": 33214, + "tori": 17689, + "tori": 17539, + "toria": 23732, + "torial": 28029, + "torian": 48399, + "tories": 14193, + "torino": 29178, + "torio": 34235, + "torn": 8572, + "torn": 18023, + "tornad": 24676, + "tornado": 9062, + "tornadoes": 28254, + "toro": 17892, + "toron": 37407, + "toronto": 16866, + "toronto": 4514, + "torpe": 34093, + "torpedo": 46582, + "torquay": 45738, + "torque": 31940, + "torre": 39563, + "torre": 38009, + "torrent": 42317, + "torrential": 41158, + "torres": 16049, + "tors": 2546, + "tortilla": 32683, + "torto": 24170, + "tortoise": 30178, + "torture": 16013, + "tortured": 29900, + "tory": 29390, + "tory": 4214, + "tos": 6094, + "tosc": 37719, + "tose": 38154, + "tosh": 17109, + "toshi": 31744, + "toss": 19656, + "tossed": 31296, + "tot": 4618, + "tot": 23659, + "total": 13507, + "total": 4445, + "totally": 5440, + "totals": 
25772, + "tote": 48145, + "tote": 19031, + "totem": 45376, + "totes": 37199, + "tothe": 12222, + "toto": 39823, + "tots": 24978, + "totten": 14360, + "tottenham": 14889, + "tou": 1879, + "tou": 29261, + "touch": 9480, + "touch": 4526, + "touchdown": 18664, + "touchdowns": 37905, + "touched": 13190, + "touches": 14832, + "touching": 14088, + "touchscreen": 39095, + "tough": 12063, + "tough": 5499, + "tougher": 33722, + "toughest": 23773, + "toughness": 45522, + "toulou": 27145, + "toulouse": 30267, + "tour": 2710, + "tour": 1760, + "tourde": 39247, + "toured": 27654, + "touri": 4224, + "touring": 11853, + "tourism": 23661, + "tourism": 6556, + "tourist": 12123, + "tourists": 15546, + "tournament": 4097, + "tournaments": 23058, + "tourney": 12603, + "tours": 8948, + "tous": 37424, + "tout": 22300, + "touts": 41274, + "tov": 28970, + "tow": 11557, + "tow": 18653, + "toward": 8508, + "towards": 4447, + "towed": 45419, + "towel": 15953, + "towels": 26578, + "tower": 26669, + "tower": 4730, + "towering": 39444, + "towers": 12701, + "towie": 44613, + "towin": 45819, + "towing": 36963, + "town": 4068, + "town": 1605, + "townfc": 33981, + "townhall": 33408, + "townhouse": 40178, + "towns": 14173, + "townsend": 26826, + "township": 14622, + "townsville": 47330, + "towork": 48233, + "tox": 7742, + "tox": 16145, + "toxic": 27436, + "toxic": 12348, + "toxicity": 41234, + "toxin": 48899, + "toxins": 36618, + "toy": 14387, + "toy": 5988, + "toya": 37602, + "toyo": 7644, + "toyota": 8908, + "toys": 39508, + "toys": 7162, + "tp": 23760, + "tp": 15188, + "tpp": 29411, + "tps": 35246, + "tq": 43066, + "tr": 635, + "tr": 6337, + "tra": 752, + "tra": 2483, + "trac": 2266, + "trace": 48611, + "trace": 14767, + "traced": 47956, + "traces": 30913, + "tracey": 25558, + "tracing": 27897, + "track": 10887, + "track": 2700, + "tracked": 27049, + "tracker": 18123, + "tracking": 10428, + "tracklist": 39777, + "tracks": 7579, + "tract": 4690, + "traction": 10644, + "tractor": 14607, + "tractors": 37854, + "tracy": 32984, + "tracy": 15508, + "trad": 48716, + "trad": 38037, + "trade": 10457, + "trade": 3629, + "traded": 18860, + "trademark": 25011, + "trader": 17700, + "traders": 19112, + "trades": 18519, + "trading": 40083, + "trading": 6520, + "tradio": 20689, + "tradition": 20838, + "tradition": 8784, + "traditional": 41113, + "traditional": 5604, + "traditionally": 35532, + "traditions": 18016, + "traf": 3227, + "trafal": 32461, + "trafalgar": 36969, + "traff": 31571, + "traffic": 12080, + "traffic": 3399, + "trafficking": 15983, + "trafford": 22912, + "trage": 12430, + "tragedy": 14082, + "tragic": 14828, + "tragically": 39599, + "trail": 11523, + "trail": 4921, + "trailblazer": 41015, + "trailblazers": 35954, + "trailer": 4700, + "trailers": 24862, + "trailing": 37427, + "trails": 10633, + "train": 9122, + "train": 3231, + "trained": 10874, + "trainee": 25795, + "trainees": 30382, + "trainer": 9767, + "trainers": 18871, + "training": 34508, + "training": 2199, + "trains": 9541, + "trait": 35160, + "traitor": 31760, + "traitors": 42633, + "traits": 25748, + "trajec": 42042, + "trak": 24065, + "tral": 14609, + "tram": 9800, + "tram": 17500, + "tramp": 46289, + "trampol": 32905, + "trampoline": 42800, + "tramrahim": 35220, + "tran": 1357, + "tran": 22031, + "trance": 30584, + "trance": 18671, + "trancefamily": 39630, + "trane": 35779, + "tranqu": 18912, + "tranquil": 35764, + "tranquility": 36688, + "trans": 1826, + "trans": 8126, + "transaction": 24881, + "transactions": 21653, + "transat": 37872, + "transatlantic": 
40703, + "transc": 21073, + "transcend": 47087, + "transcript": 39008, + "transcription": 48765, + "transfer": 22659, + "transfer": 7134, + "transferred": 29700, + "transferring": 40924, + "transfers": 21621, + "transform": 8142, + "transform": 12288, + "transformation": 34204, + "transformation": 7832, + "transformational": 47135, + "transformationtuesday": 36511, + "transformative": 38106, + "transformed": 17453, + "transformer": 38235, + "transformers": 17843, + "transforming": 44470, + "transforming": 19251, + "transforms": 30312, + "transgender": 17732, + "transi": 32236, + "transit": 10174, + "transiti": 22939, + "transition": 11391, + "transitional": 41519, + "transitioning": 43586, + "transitions": 39374, + "transl": 12243, + "translate": 22655, + "translated": 20752, + "translates": 36334, + "translating": 42156, + "translation": 12153, + "translations": 41367, + "translator": 36230, + "translucent": 49052, + "transm": 18861, + "transmission": 16103, + "transmitted": 48605, + "transmitter": 40457, + "transp": 11726, + "transpa": 18524, + "transparen": 16108, + "transparency": 16828, + "transparent": 19017, + "transpl": 16038, + "transplant": 41871, + "transplant": 18771, + "transplantation": 45207, + "transpor": 19406, + "transport": 10231, + "transport": 7362, + "transportation": 10911, + "transported": 29089, + "transporter": 43568, + "transporting": 42259, + "trap": 36224, + "trap": 9677, + "trape": 42435, + "trapped": 15592, + "traps": 28517, + "tras": 30638, + "trash": 39215, + "trash": 9798, + "traum": 22263, + "trauma": 13846, + "traumati": 46613, + "traumatic": 29958, + "trav": 7586, + "trav": 46955, + "trave": 35357, + "travel": 2824, + "travel": 1949, + "travelblog": 35957, + "travelblogger": 25494, + "travelchat": 46455, + "traveled": 20384, + "traveler": 17794, + "travelers": 20644, + "travelgram": 40069, + "traveling": 9365, + "travelled": 23428, + "traveller": 22546, + "travellers": 29583, + "travelling": 11190, + "travelphotography": 22808, + "travelpics": 32293, + "travels": 11472, + "traveltips": 36260, + "traveltuesday": 16713, + "traverse": 35058, + "travi": 46971, + "travis": 27441, + "travis": 12287, + "traw": 42288, + "trax": 34421, + "tray": 38470, + "tray": 14621, + "trays": 39798, + "trc": 41803, + "tre": 975, + "tre": 6033, + "treach": 46005, + "tread": 26182, + "tread": 35658, + "treadmill": 37780, + "treas": 8591, + "treason": 28103, + "treasure": 9922, + "treasured": 48068, + "treasurer": 26985, + "treasures": 16500, + "treasury": 20956, + "treat": 3968, + "treat": 3901, + "treated": 9772, + "treating": 13842, + "treatment": 4869, + "treatments": 15839, + "treats": 8878, + "treaty": 19967, + "treble": 33194, + "trecht": 33812, + "tree": 13354, + "tree": 2677, + "treehouse": 42387, + "trees": 4682, + "trek": 13236, + "trek": 8136, + "trekking": 25293, + "trell": 35159, + "tremb": 44043, + "tremend": 14659, + "tremendous": 15988, + "tren": 2579, + "trench": 23846, + "trenches": 38723, + "trend": 19986, + "trend": 6643, + "trending": 6087, + "trends": 7015, + "trendsetter": 46666, + "trendy": 23072, + "trent": 45885, + "trent": 15548, + "trenton": 37470, + "tres": 23569, + "tress": 4733, + "tresses": 24273, + "trevor": 23437, + "trevor": 13219, + "trex": 42114, + "trey": 36670, + "trey": 16939, + "tri": 924, + "tri": 9618, + "triad": 45602, + "trial": 5991, + "trials": 10992, + "triangle": 14615, + "triathlon": 18080, + "trib": 45151, + "tribal": 16629, + "tribe": 19943, + "tribe": 11365, + "tribeca": 35184, + "tribes": 26546, + "tribu": 3028, + "tribun": 
14311, + "tribunal": 32911, + "tribune": 18556, + "tribute": 5493, + "tributes": 15537, + "tric": 9511, + "tric": 4081, + "trich": 39519, + "trick": 17177, + "trick": 8172, + "tricks": 13177, + "tricky": 22319, + "trics": 31437, + "trident": 35491, + "tridge": 18722, + "tried": 4554, + "tries": 4315, + "trife": 48962, + "trigge": 30509, + "trigger": 16158, + "triggered": 30924, + "triggers": 37319, + "tright": 29915, + "tril": 40626, + "trill": 39297, + "trilli": 39350, + "trillion": 20160, + "trilo": 15183, + "trilogy": 16862, + "trim": 14182, + "trimmed": 40657, + "trin": 6628, + "trinidad": 26244, + "trinity": 30744, + "trinity": 12267, + "trio": 10263, + "trip": 23421, + "trip": 2529, + "tripad": 37189, + "tripadvisor": 38708, + "triple": 16519, + "triple": 7673, + "triplets": 48601, + "tripod": 36141, + "tripoli": 40095, + "trippin": 43073, + "tripping": 35229, + "trippy": 35137, + "trips": 12292, + "tris": 29690, + "trish": 40511, + "trish": 37179, + "trisha": 39152, + "tristan": 25497, + "trit": 37087, + "triton": 45437, + "triu": 14782, + "trium": 21065, + "triumph": 26507, + "triumph": 15307, + "triumphant": 41918, + "trivi": 21228, + "trivia": 10642, + "triviatuesday": 45499, + "trix": 41017, + "tro": 1046, + "tro": 3332, + "trock": 44368, + "trojan": 30653, + "trojans": 25310, + "trol": 10306, + "troll": 39737, + "troll": 17103, + "trolley": 25124, + "trolling": 28552, + "trolls": 20890, + "tromb": 32390, + "trombone": 44423, + "tron": 19057, + "tron": 10684, + "tronic": 34258, + "tronics": 34397, + "troom": 23691, + "troop": 12492, + "troop": 24054, + "trooper": 18327, + "troopers": 23576, + "troops": 10109, + "trop": 31585, + "trope": 41150, + "trophies": 20998, + "trophy": 42676, + "trophy": 6502, + "tropic": 21794, + "tropic": 36736, + "tropical": 41699, + "tropical": 8686, + "tropics": 36940, + "tros": 40456, + "trose": 36022, + "trot": 30453, + "trotter": 38287, + "trou": 5181, + "troubad": 49037, + "trouble": 25669, + "trouble": 7848, + "troubled": 25568, + "troubles": 27254, + "trough": 39761, + "troupe": 34803, + "trous": 19727, + "trousers": 23172, + "trout": 14853, + "trove": 45350, + "trow": 46914, + "troy": 26283, + "troy": 12819, + "trs": 24770, + "tru": 931, + "tru": 25326, + "truck": 14781, + "truck": 4629, + "trucker": 45918, + "truckers": 43404, + "trucking": 26208, + "trucks": 9569, + "trude": 39017, + "trudeau": 15752, + "true": 13096, + "true": 2328, + "truec": 37583, + "truelove": 45711, + "truffle": 23064, + "truffles": 37057, + "truly": 4545, + "trum": 11766, + "trum": 11399, + "truman": 29414, + "trump": 9124, + "trump": 1797, + "trumpet": 23681, + "trumpp": 45550, + "trumprussia": 39135, + "trumps": 29793, + "trumptrain": 43595, + "trun": 16163, + "trun": 46661, + "trunk": 18347, + "trunks": 38531, + "truro": 43507, + "truss": 46080, + "trust": 17691, + "trust": 3876, + "truste": 17356, + "trusted": 16538, + "trustee": 30803, + "trustees": 28853, + "trusting": 33221, + "trusts": 27507, + "trustworthy": 46840, + "trusty": 37955, + "truth": 21335, + "truth": 4319, + "truths": 27179, + "trx": 31620, + "try": 4487, + "try": 1209, + "tryin": 31085, + "trying": 2551, + "tryna": 15702, + "tryout": 43832, + "tryouts": 28053, + "ts": 2290, + "ts": 590, + "tsa": 25977, + "tsal": 20438, + "tsb": 45015, + "tsc": 37437, + "tsch": 38778, + "tsd": 20611, + "tse": 49144, + "tsfor": 42654, + "tsford": 32823, + "tsh": 42872, + "tshirt": 14907, + "tshirts": 29377, + "tsi": 40048, + "tsi": 37867, + "tsk": 43600, + "tsla": 35681, + "tsm": 43452, + "tsman": 20046, + "tsn": 
44921, + "tsn": 26896, + "tson": 42353, + "tson": 47140, + "tsp": 34230, + "tsu": 13950, + "tsu": 20175, + "tsun": 19155, + "tsunami": 24286, + "tsville": 29080, + "tt": 971, + "tt": 1402, + "tta": 2646, + "ttc": 27668, + "tte": 23105, + "tte": 3070, + "tted": 15163, + "tten": 11351, + "tten": 17479, + "tter": 18691, + "tter": 5165, + "tters": 6318, + "ttes": 9293, + "tti": 5237, + "ttin": 36589, + "tting": 1188, + "ttino": 47389, + "ttip": 46993, + "ttle": 9253, + "ttm": 46838, + "tto": 8759, + "tto": 8105, + "tton": 10562, + "ttot": 12480, + "ttp": 30828, + "ttr": 47589, + "tts": 11570, + "ttt": 17256, + "tttt": 33119, + "ttu": 44006, + "ttv": 24281, + "tty": 11457, + "tty": 1856, + "tu": 764, + "tu": 5760, + "tua": 41344, + "tual": 4799, + "tuan": 37297, + "tub": 34907, + "tub": 15450, + "tube": 38229, + "tube": 3308, + "tuber": 30371, + "tuberculo": 42606, + "tuberculosis": 43129, + "tubes": 22870, + "tubing": 40794, + "tubs": 41705, + "tubular": 48786, + "tuc": 14456, + "tuc": 43871, + "tuck": 22398, + "tucked": 26923, + "tucker": 39703, + "tucker": 15726, + "tucket": 32677, + "tucson": 17250, + "tudor": 24547, + "tue": 17515, + "tues": 2283, + "tues": 12113, + "tuesday": 10209, + "tuesday": 2519, + "tuesdaymotivation": 25432, + "tuesdays": 23195, + "tuesdaythoughts": 17988, + "tuf": 44510, + "tuff": 38868, + "tug": 47032, + "tug": 27902, + "tuition": 21129, + "tuk": 39271, + "tuk": 14993, + "tul": 9069, + "tul": 40837, + "tula": 36332, + "tulane": 44893, + "tulip": 28389, + "tulips": 30886, + "tulsa": 18850, + "tum": 12932, + "tum": 8843, + "tumb": 8831, + "tumble": 38284, + "tumbler": 48790, + "tumbling": 46226, + "tumblr": 11841, + "tummy": 26053, + "tumor": 22616, + "tumors": 39894, + "tumour": 45129, + "tun": 1415, + "tun": 21349, + "tuna": 15037, + "tundra": 39899, + "tune": 11427, + "tune": 3300, + "tuned": 5898, + "tunein": 16809, + "tuner": 42905, + "tunes": 31688, + "tunes": 10810, + "tunesapp": 32550, + "tung": 47940, + "tung": 31092, + "tuni": 16270, + "tunic": 43495, + "tuning": 19585, + "tunisia": 23346, + "tunnel": 11096, + "tunnels": 29814, + "tuous": 28738, + "tup": 37956, + "tup": 4507, + "tupac": 31506, + "tups": 44855, + "tur": 985, + "tur": 17182, + "tura": 16127, + "tural": 45143, + "tural": 4261, + "turb": 18973, + "turban": 48515, + "turbine": 26880, + "turbines": 38863, + "turbo": 23578, + "turbo": 13668, + "turbul": 31100, + "turbulent": 47871, + "ture": 4321, + "ture": 941, + "tured": 3987, + "turer": 11993, + "turers": 16956, + "tures": 2400, + "turf": 36762, + "turf": 12510, + "turi": 11896, + "turin": 36251, + "turing": 5812, + "turismo": 30202, + "turk": 8254, + "turk": 32507, + "turkey": 35977, + "turkey": 4790, + "turkeys": 37991, + "turkish": 48199, + "turkish": 9278, + "turks": 34344, + "turmeric": 34044, + "turmoil": 37751, + "turn": 5522, + "turn": 2105, + "turnaround": 32719, + "turnbull": 27863, + "turned": 3771, + "turner": 42867, + "turner": 8777, + "turning": 4976, + "turno": 21377, + "turnout": 11654, + "turnover": 30794, + "turnpike": 38301, + "turns": 3185, + "turnt": 28887, + "turntable": 37953, + "turnup": 30591, + "turo": 29224, + "turquo": 19390, + "turquoise": 19899, + "turt": 13716, + "turtle": 35943, + "turtle": 10912, + "turtles": 17862, + "tus": 24828, + "tus": 7079, + "tusc": 17909, + "tuscal": 42638, + "tuscaloosa": 44375, + "tuscan": 42865, + "tuscany": 20885, + "tuss": 31741, + "tut": 35121, + "tutor": 10054, + "tutor": 27858, + "tutorial": 12857, + "tutorials": 30973, + "tutoring": 37532, + "tutti": 46880, + "tutu": 35845, + 
"tux": 28720, + "tux": 49186, + "tuxedo": 40173, + "tv": 3197, + "tv": 1583, + "tvc": 49190, + "tvd": 25889, + "tvmiaw": 38554, + "tvn": 44232, + "tvs": 27114, + "tvtime": 19947, + "tvxq": 43968, + "tw": 966, + "tw": 12842, + "twa": 46954, + "twain": 30689, + "twal": 48126, + "tware": 5707, + "twc": 41217, + "twd": 29440, + "twd": 19343, + "twdfamily": 38218, + "twe": 18365, + "tweak": 48870, + "tweaks": 42661, + "twee": 1330, + "tweed": 26904, + "tweeps": 14928, + "tweet": 11826, + "tweet": 1842, + "tweeta": 32024, + "tweetapicture": 40596, + "tweeted": 7841, + "tweeter": 32876, + "tweeters": 31713, + "tweeting": 8901, + "tweets": 3560, + "tweetyour": 45033, + "twel": 14476, + "twelf": 39443, + "twelfth": 44072, + "twell": 38722, + "twell": 30162, + "twelve": 19694, + "twent": 27027, + "twenti": 35167, + "twenty": 13016, + "twentyon": 39609, + "twentyonepilots": 40007, + "twer": 13923, + "twerk": 28506, + "twi": 5537, + "twice": 6970, + "twick": 34326, + "twickenham": 39619, + "twil": 12804, + "twili": 35754, + "twilight": 46366, + "twilight": 14512, + "twill": 43703, + "twin": 9342, + "twin": 6769, + "twine": 42775, + "twinkle": 36545, + "twinning": 30156, + "twinpeaks": 32042, + "twins": 8040, + "twist": 10589, + "twisted": 18233, + "twister": 45933, + "twists": 34149, + "twit": 1643, + "twit": 18704, + "twitart": 27709, + "twitch": 13251, + "twitch": 9153, + "twitter": 7546, + "twitter": 1989, + "twitterkurds": 32722, + "twitterstorians": 35389, + "two": 17211, + "two": 1237, + "twol": 31964, + "twood": 40404, + "twood": 13245, + "twp": 33283, + "twright": 46778, + "twt": 6825, + "twx": 26830, + "twy": 45861, + "tx": 6636, + "tx": 5200, + "txhsfb": 34757, + "txlege": 26995, + "txst": 40761, + "txt": 24595, + "txwx": 22995, + "ty": 1260, + "ty": 744, + "tya": 41273, + "tycoon": 36803, + "tye": 43097, + "tyfree": 41215, + "tyga": 41952, + "tying": 22559, + "tyl": 47537, + "tyler": 14787, + "tyler": 7058, + "tym": 45772, + "tyne": 27000, + "tyne": 29729, + "tyour": 16823, + "type": 15673, + "type": 3877, + "typed": 40753, + "typeface": 44969, + "types": 7543, + "typewriter": 42180, + "typho": 17486, + "typhoon": 21110, + "typic": 21648, + "typical": 9854, + "typically": 23175, + "typing": 20102, + "typo": 18831, + "typo": 29076, + "typography": 24332, + "tyr": 15590, + "tyran": 46921, + "tyranny": 35402, + "tyre": 38330, + "tyre": 16864, + "tyres": 21376, + "tyrone": 30226, + "tyson": 16616, + "tz": 7710, + "tz": 4983, + "tzer": 45267, + "tzky": 47127, + "tzman": 46032, + "tzu": 34354, + "té": 27208, + "té": 39694, + "u": 84, + "u": 340, + "ua": 34075, + "ua": 8441, + "uaap": 46753, + "uaap": 43774, + "uab": 35587, + "uae": 9752, + "ual": 1921, + "ually": 10767, + "uan": 33062, + "uas": 38339, + "uav": 30303, + "ub": 18430, + "ub": 13494, + "uba": 29768, + "ubc": 42479, + "ubc": 29455, + "ube": 30892, + "uber": 25896, + "uber": 10668, + "ubi": 26758, + "ubio": 32867, + "ubiquit": 48129, + "ubis": 28248, + "ubisoft": 32051, + "ubs": 43851, + "ubun": 28184, + "ubuntu": 30791, + "uc": 4903, + "uc": 12438, + "uca": 30942, + "ucc": 44844, + "ucc": 29138, + "ucci": 30746, + "uccino": 30409, + "ucd": 44746, + "ucd": 43514, + "ucf": 24414, + "uch": 19465, + "uch": 22394, + "uchi": 37473, + "uci": 46354, + "uci": 28925, + "uck": 34189, + "ucl": 12013, + "ucl": 13647, + "ucla": 37667, + "ucla": 17259, + "ucn": 49036, + "uconn": 30549, + "ud": 6560, + "ud": 5765, + "uda": 22800, + "udaipur": 49385, + "uddin": 43035, + "ude": 37016, + "ude": 35194, + "ue": 16696, + "ue": 1190, + "uefa": 19189, + 
"uel": 24231, + "uer": 45951, + "ues": 2526, + "uf": 17777, + "uf": 19230, + "ufc": 20396, + "ufc": 6490, + "uff": 45701, + "ufo": 19443, + "ufos": 48234, + "ug": 3754, + "ug": 16061, + "uga": 16056, + "ugand": 25965, + "uganda": 11125, + "ugandan": 44206, + "ugby": 30658, + "ugh": 39736, + "ugh": 12755, + "ugliest": 43543, + "ugly": 36070, + "ugly": 8159, + "ugu": 18144, + "uh": 17661, + "uh": 9219, + "uhc": 44974, + "uhh": 35938, + "uhhh": 45270, + "uhm": 35614, + "uhur": 29434, + "uhuru": 35690, + "ui": 17326, + "ui": 11458, + "uil": 29395, + "uit": 30696, + "uit": 47584, + "uj": 33266, + "uji": 39672, + "uk": 2294, + "uk": 1432, + "uka": 23294, + "uke": 48836, + "uke": 28577, + "uked": 48987, + "uki": 37435, + "uki": 9009, + "ukin": 34996, + "ukip": 20360, + "uklabour": 36902, + "ukmfg": 38764, + "uko": 33562, + "ukone": 24682, + "ukrain": 15468, + "ukraine": 7768, + "ukrainian": 16927, + "ukrunchat": 34481, + "uku": 29541, + "uku": 36082, + "ukulele": 39094, + "ul": 914, + "ul": 6625, + "ula": 34104, + "ula": 9506, + "ular": 4927, + "ulary": 21701, + "ulate": 20467, + "ulation": 32896, + "ule": 35616, + "ules": 26274, + "ulf": 49331, + "uli": 41841, + "uli": 22174, + "ull": 33254, + "ulla": 30577, + "ullah": 45310, + "ullivan": 45252, + "ulls": 37418, + "ulo": 46084, + "ulo": 36738, + "ulous": 42490, + "ulous": 4281, + "ulously": 20167, + "ulster": 29709, + "ulster": 24639, + "ult": 4380, + "ulti": 11925, + "ulties": 21884, + "ultimat": 16522, + "ultimate": 34684, + "ultimate": 5377, + "ultimatefan": 48372, + "ultimatefanlive": 48644, + "ultimately": 23023, + "ultr": 25636, + "ultra": 11398, + "ultra": 8118, + "ultram": 44519, + "ultrasound": 29717, + "ulture": 22272, + "ulty": 8036, + "ulu": 41815, + "ulu": 15659, + "ulum": 17235, + "uly": 33220, + "ulysses": 46114, + "um": 1622, + "um": 1008, + "uma": 29982, + "uma": 9256, + "uman": 27112, + "umar": 25656, + "umass": 39390, + "umatic": 45006, + "umb": 7493, + "umber": 19195, + "umbrel": 34773, + "umbrella": 17143, + "umbrellas": 42782, + "umbria": 39287, + "umc": 39491, + "umd": 42067, + "ume": 38480, + "umen": 42832, + "uments": 25924, + "umer": 23539, + "umes": 21403, + "umi": 48772, + "umi": 15458, + "umich": 41294, + "umin": 31542, + "umm": 26129, + "umm": 21215, + "ummer": 47628, + "ummm": 33665, + "umni": 31739, + "ump": 22224, + "umpire": 36214, + "ums": 8643, + "umu": 39788, + "un": 569, + "un": 2271, + "una": 6385, + "unable": 17793, + "unacceptable": 25234, + "unanim": 20800, + "unanimous": 33520, + "unanimously": 31798, + "unanswered": 43611, + "unarmed": 41541, + "unas": 41366, + "unavailable": 48430, + "unaware": 33347, + "unbeat": 37056, + "unbeatable": 40267, + "unbeaten": 19228, + "unbeliev": 11383, + "unbelievable": 13306, + "unbelievably": 33781, + "unborn": 37257, + "unboxing": 32866, + "unbreakable": 32956, + "unbroken": 49271, + "unc": 24921, + "unc": 15322, + "uncanny": 32556, + "uncertain": 30384, + "uncertainty": 23956, + "unch": 1527, + "unchanged": 34272, + "uncharted": 34560, + "unci": 25521, + "unciation": 34117, + "uncle": 31537, + "uncle": 8002, + "unclear": 32955, + "uncles": 45335, + "uncomfortable": 22470, + "uncommon": 34888, + "uncondition": 46561, + "unconditional": 31112, + "unconscious": 34791, + "unconstitutional": 43585, + "unconventional": 39440, + "uncover": 33031, + "uncovered": 28234, + "uncture": 38736, + "uncut": 41056, + "und": 9762, + "und": 9732, + "unda": 39932, + "undant": 25377, + "unday": 29338, + "unde": 45226, + "undead": 40105, + "undecided": 49368, + "undefeated": 15326, + 
"undeni": 38424, + "under": 1473, + "under": 1798, + "underage": 45669, + "underattack": 35075, + "undercover": 21595, + "underdog": 44266, + "undere": 21675, + "underestim": 23348, + "underestimate": 31794, + "undergo": 31545, + "undergoing": 26419, + "undergrad": 38331, + "undergraduate": 24320, + "underground": 9396, + "undering": 30826, + "underlying": 31812, + "undermine": 42839, + "underneath": 20857, + "underrated": 19494, + "unders": 20376, + "understand": 47582, + "understand": 4600, + "understanding": 7522, + "understands": 21607, + "understatement": 38296, + "understood": 17303, + "undertaker": 40144, + "undertaking": 49067, + "undertale": 48283, + "underthe": 41161, + "underwater": 14760, + "underway": 6273, + "underwear": 21154, + "underwood": 21474, + "underworld": 34760, + "undi": 23845, + "undisclosed": 39334, + "undo": 35454, + "undocumented": 35414, + "undoub": 38836, + "undoubtedly": 42204, + "undp": 26691, + "une": 4522, + "une": 10966, + "unearth": 32716, + "unearthed": 36632, + "unemp": 15139, + "unemployed": 32721, + "unemployment": 19350, + "unes": 6394, + "unesco": 16216, + "uneven": 43204, + "unex": 9484, + "unexpe": 10802, + "unexpec": 31829, + "unexpected": 12293, + "unexpectedly": 35622, + "unf": 29285, + "unfair": 22193, + "unfinished": 26526, + "unfit": 45367, + "unfold": 38681, + "unfollow": 38797, + "unfor": 14010, + "unforgettable": 16173, + "unfortun": 10194, + "unfortunate": 22361, + "unfortunately": 12863, + "unfpa": 45048, + "ung": 10439, + "ung": 4334, + "unga": 19151, + "ungsoo": 25582, + "unh": 25365, + "unhappy": 26528, + "unhcr": 43451, + "unhealthy": 30994, + "uni": 1107, + "uni": 5926, + "unic": 7648, + "unicef": 38286, + "unicef": 19259, + "unicorn": 15660, + "unicorns": 35183, + "unidenti": 33707, + "unidentified": 35563, + "unification": 45036, + "unified": 20876, + "uniform": 11075, + "uniforms": 17838, + "unil": 32388, + "unilever": 48654, + "uniof": 21218, + "union": 14210, + "union": 3503, + "unions": 18353, + "unis": 30482, + "unis": 39266, + "unisex": 27609, + "unison": 46694, + "unit": 28522, + "unit": 5695, + "unite": 15078, + "unite": 11305, + "uniteblue": 20935, + "united": 10898, + "united": 2690, + "unitedstates": 39636, + "unitedway": 47486, + "unites": 32061, + "uniting": 31318, + "units": 10394, + "unity": 38300, + "unity": 8581, + "univ": 36680, + "univ": 14896, + "univer": 15574, + "univers": 5855, + "universal": 19148, + "universal": 8754, + "universe": 6104, + "universi": 41692, + "universit": 26019, + "universities": 16408, + "university": 40728, + "university": 2182, + "universityof": 46158, + "unk": 5542, + "unknown": 8685, + "unl": 43807, + "unlawful": 42305, + "unle": 19677, + "unlea": 23893, + "unleash": 26706, + "unleashed": 27955, + "unless": 10602, + "unlike": 16694, + "unlikely": 18904, + "unlimited": 11015, + "unlock": 18649, + "unlocked": 16770, + "unlocking": 40810, + "unlucky": 35029, + "unlv": 42283, + "unmanned": 36751, + "unmatched": 46054, + "unn": 38364, + "unnamed": 44985, + "unnecessary": 24100, + "unner": 31481, + "unning": 43282, + "unnoticed": 42807, + "uno": 32446, + "uno": 17078, + "unofficial": 22506, + "unpacking": 43589, + "unpaid": 32811, + "unparalleled": 44396, + "unplugged": 31724, + "unpopular": 40232, + "unprece": 23054, + "unprecedented": 23344, + "unpredictable": 38684, + "unra": 45150, + "unreal": 46980, + "unreal": 15636, + "unrelated": 38644, + "unreleased": 29654, + "unrest": 36452, + "uns": 25908, + "unsafe": 32071, + "unsc": 36395, + "unseen": 19069, + "unsigned": 39346, + 
"unsolved": 40836, + "unsplash": 46196, + "unstable": 34730, + "unstopp": 22105, + "unstoppable": 23484, + "unsuccessful": 47478, + "unsung": 33015, + "unsure": 26396, + "unt": 19654, + "unt": 6537, + "until": 1942, + "untitled": 21309, + "unto": 19801, + "untold": 32206, + "untouch": 44509, + "untouched": 42764, + "unused": 29636, + "unusual": 12613, + "unusually": 36465, + "unve": 6685, + "unveil": 20483, + "unveiled": 13572, + "unveiling": 20327, + "unveils": 15057, + "unwanted": 25285, + "unwind": 34064, + "unya": 37142, + "uo": 30874, + "uo": 36162, + "uof": 11155, + "uoft": 37329, + "uon": 48144, + "uous": 40185, + "up": 1083, + "up": 705, + "upa": 31727, + "upbeat": 39201, + "upcoming": 4196, + "upcycled": 46552, + "upd": 3226, + "update": 2491, + "updated": 5974, + "updates": 4904, + "updating": 22792, + "uper": 38082, + "uper": 33056, + "upfront": 42064, + "upgrade": 10365, + "upgraded": 18577, + "upgrades": 21253, + "upgrading": 34368, + "uph": 14128, + "uphill": 42767, + "uphol": 26195, + "uphold": 43897, + "upholstery": 44556, + "upl": 41939, + "uplift": 45389, + "uplifting": 29546, + "upload": 13968, + "uploaded": 16793, + "uploading": 30145, + "upon": 23524, + "upon": 5067, + "upp": 19549, + "upp": 45946, + "upper": 22465, + "upper": 7067, + "upri": 15982, + "upright": 29818, + "uprising": 26006, + "upro": 28922, + "ups": 6926, + "upscale": 47501, + "upset": 11214, + "upsets": 42637, + "upside": 15362, + "upstairs": 21387, + "upstate": 33335, + "upstream": 45517, + "upthe": 31510, + "upto": 26575, + "upton": 31910, + "uptown": 23807, + "upward": 32526, + "upwards": 34915, + "uq": 39591, + "ur": 565, + "ur": 1775, + "ura": 29337, + "ura": 3544, + "urable": 40194, + "ural": 23547, + "ural": 33948, + "uran": 16197, + "uranium": 29850, + "urban": 7931, + "urban": 5800, + "urbanart": 40834, + "urd": 47880, + "urday": 19742, + "urdu": 29976, + "ure": 5514, + "ure": 726, + "ured": 4210, + "urer": 20864, + "ures": 2288, + "urg": 35995, + "urge": 14852, + "urged": 23790, + "urgency": 47612, + "urgent": 13693, + "urgently": 34534, + "urges": 16692, + "urging": 27748, + "uri": 11052, + "uri": 8699, + "urie": 46429, + "urin": 45245, + "urine": 28864, + "uring": 1351, + "url": 23464, + "urn": 38075, + "uro": 17343, + "uro": 5925, + "urology": 48585, + "urope": 14918, + "urs": 4794, + "urself": 31942, + "urst": 19181, + "urstruly": 34751, + "urstrulymahesh": 35314, + "ursula": 38390, + "urt": 24309, + "uru": 16322, + "uru": 11768, + "uruguay": 27931, + "urus": 14246, + "urve": 24583, + "ury": 8642, + "ury": 2106, + "us": 904, + "us": 718, + "usa": 9491, + "usa": 2547, + "usability": 46736, + "usable": 22890, + "usaf": 25017, + "usage": 19137, + "usaid": 34507, + "usair": 36742, + "usairforce": 42179, + "usarmy": 19132, + "usatoday": 40263, + "usav": 36056, + "usb": 10281, + "usc": 13346, + "usc": 14995, + "uscg": 43932, + "usd": 7485, + "usda": 25829, + "use": 4419, + "use": 1483, + "used": 32289, + "used": 2026, + "useful": 9784, + "useless": 20154, + "usemb": 39700, + "user": 21248, + "user": 7031, + "username": 28162, + "users": 7433, + "uses": 5282, + "useum": 45189, + "usf": 32385, + "usf": 28942, + "usgs": 35103, + "ush": 12001, + "ush": 18335, + "usher": 27411, + "ushi": 47734, + "usi": 25540, + "usic": 34909, + "usic": 16753, + "using": 1996, + "usky": 45778, + "usl": 42113, + "usm": 40041, + "usmc": 21678, + "usmnt": 30662, + "usn": 40579, + "usnavy": 24500, + "usnews": 43752, + "uso": 21539, + "usopen": 21782, + "usp": 26651, + "usps": 39980, + "usrc": 33274, + "uss": 11545, + 
"uss": 9260, + "ussia": 29553, + "ussoccer": 42828, + "ussr": 32697, + "ust": 35501, + "ust": 24725, + "usu": 4254, + "usu": 40434, + "usual": 6129, + "usually": 8296, + "usur": 45582, + "uswnt": 35255, + "ut": 1419, + "ut": 3641, + "uta": 42706, + "uta": 25925, + "utah": 27474, + "utah": 9312, + "utc": 18196, + "utd": 10493, + "ute": 16856, + "ute": 3130, + "uten": 32089, + "uter": 39197, + "utes": 2850, + "uth": 48819, + "uth": 44750, + "uti": 24568, + "util": 28824, + "utili": 17015, + "utilities": 27210, + "utility": 14941, + "utilize": 36861, + "utilized": 47604, + "utilizing": 40212, + "utm": 47853, + "utmost": 42352, + "uto": 18866, + "uto": 13683, + "utopia": 34433, + "utpol": 42605, + "utr": 48726, + "utrecht": 37216, + "uts": 11740, + "utsa": 37528, + "utt": 17096, + "uttar": 40168, + "uttarak": 33755, + "uttarakhand": 35655, + "utter": 18769, + "utter": 24558, + "utterly": 21353, + "utto": 42183, + "utv": 36351, + "utz": 45320, + "uu": 5702, + "uu": 14553, + "uuu": 44355, + "uuu": 27656, + "uuuu": 16720, + "uuuu": 40797, + "uv": 23777, + "uv": 15977, + "uva": 23908, + "uw": 13933, + "uw": 19166, + "uwe": 48785, + "uwu": 35544, + "ux": 9251, + "ux": 6213, + "uy": 31929, + "uy": 48113, + "uz": 19398, + "uz": 36991, + "uzbe": 43007, + "uzbekistan": 45024, + "uzzi": 48210, + "v": 85, + "v": 341, + "va": 4648, + "va": 1892, + "vaa": 37488, + "vable": 23088, + "vac": 3125, + "vac": 34085, + "vaca": 48215, + "vacancies": 26333, + "vacancy": 21247, + "vacant": 25262, + "vacation": 28336, + "vacation": 6561, + "vacations": 29002, + "vacay": 44716, + "vacc": 13342, + "vaccin": 19164, + "vaccinated": 48134, + "vaccination": 32518, + "vaccine": 47780, + "vaccine": 17493, + "vaccines": 25860, + "vach": 46211, + "vacu": 16058, + "vacuum": 18420, + "vad": 11880, + "vada": 46759, + "vader": 21908, + "vae": 39384, + "vag": 13015, + "vague": 42154, + "vah": 26921, + "vai": 26893, + "vai": 36802, + "vail": 21189, + "vain": 25538, + "vais": 28719, + "vaj": 34206, + "vak": 16288, + "vak": 41597, + "val": 1214, + "val": 1560, + "vala": 48525, + "valdez": 40617, + "vale": 35554, + "vale": 10820, + "valedic": 43525, + "valen": 12630, + "valence": 30225, + "valenci": 34183, + "valencia": 16559, + "valent": 3655, + "valent": 15300, + "valentin": 48631, + "valentina": 43741, + "valentine": 11208, + "valentine": 5876, + "valentines": 10259, + "valentinesday": 12369, + "valentino": 29624, + "valeri": 31951, + "valerie": 25592, + "valet": 45749, + "vali": 8230, + "valiant": 33804, + "valid": 15126, + "validation": 32536, + "valkyrie": 42326, + "vall": 23523, + "vall": 35295, + "vallarta": 47874, + "valle": 24857, + "valle": 29105, + "valley": 18354, + "valley": 3136, + "valleys": 28649, + "valor": 30930, + "vals": 7431, + "valu": 6291, + "valuable": 10056, + "valuation": 25894, + "value": 41358, + "value": 4602, + "valued": 17801, + "values": 8857, + "valve": 17001, + "valves": 33517, + "vam": 9983, + "vamo": 46718, + "vamos": 30346, + "vamp": 10680, + "vampi": 47017, + "vampire": 47576, + "vampire": 13220, + "vampires": 30868, + "vamps": 44810, + "van": 2446, + "van": 2451, + "vana": 20543, + "vanc": 6320, + "vance": 31447, + "vancou": 6750, + "vancouver": 31904, + "vancouver": 7208, + "vand": 11691, + "vandalism": 45664, + "vander": 16264, + "vanderbilt": 33524, + "vandy": 39268, + "vane": 43828, + "vaness": 13328, + "vanessa": 16836, + "vangogh": 47849, + "vanguard": 27916, + "vani": 15396, + "vani": 26459, + "vania": 10998, + "vanilla": 11974, + "vanished": 43783, + "vanishing": 48296, + "vanity": 48353, 
+ "vanity": 22938, + "vans": 11711, + "vant": 26298, + "vantage": 31749, + "vanu": 42892, + "vanuatu": 48766, + "vap": 10462, + "vape": 25423, + "vape": 20219, + "vaping": 29403, + "vapor": 37167, + "vapor": 30729, + "vapori": 46183, + "var": 3187, + "var": 12998, + "vara": 47492, + "varan": 36585, + "varanasi": 39364, + "vard": 21866, + "vard": 8773, + "vardy": 47371, + "vare": 38159, + "vares": 42895, + "vargas": 32752, + "vari": 3354, + "variable": 26416, + "varian": 34334, + "variant": 20293, + "variants": 38312, + "variation": 26420, + "variations": 29025, + "varied": 32334, + "varies": 32543, + "varieties": 23805, + "variety": 8396, + "various": 7395, + "varsity": 43716, + "varsity": 8574, + "varun": 48120, + "varun": 22069, + "vary": 18855, + "varying": 36456, + "vas": 5669, + "vas": 5995, + "vasc": 40995, + "vascular": 19218, + "vase": 20431, + "vasi": 49092, + "vast": 24413, + "vast": 16414, + "vastly": 48257, + "vat": 11588, + "vat": 18363, + "vatican": 21030, + "vation": 37884, + "vau": 6391, + "vaugh": 25158, + "vaughan": 21392, + "vaughn": 29013, + "vaul": 27469, + "vault": 15240, + "vaus": 40217, + "vaux": 27403, + "vauxhall": 29173, + "vaw": 47952, + "vay": 48000, + "vaz": 38142, + "vb": 29365, + "vb": 8778, + "vball": 38329, + "vc": 28670, + "vc": 7952, + "vcs": 43528, + "vcu": 40102, + "vd": 9515, + "vday": 42055, + "ve": 673, + "ve": 563, + "vea": 43798, + "veal": 36616, + "veau": 24419, + "vec": 19912, + "vector": 40453, + "vector": 21533, + "ved": 19515, + "ved": 1102, + "veda": 44401, + "vedere": 45660, + "vedi": 47971, + "vee": 35708, + "vee": 17073, + "veen": 22432, + "veer": 21243, + "veer": 22058, + "veg": 9048, + "veg": 16460, + "vega": 22930, + "vegan": 15705, + "vegan": 5615, + "vegans": 48514, + "vegas": 20288, + "vegas": 4413, + "vege": 6219, + "vegetable": 15725, + "vegetables": 14119, + "vegetarian": 14600, + "vegetation": 33947, + "veggie": 19401, + "veggies": 16767, + "vehic": 3973, + "vehicle": 5299, + "vehicles": 8361, + "veil": 23516, + "vein": 29169, + "veins": 28867, + "veit": 30620, + "vel": 942, + "vel": 1287, + "vela": 34898, + "veld": 34011, + "veled": 15370, + "veli": 49166, + "veling": 37970, + "vell": 21173, + "vell": 32997, + "velo": 14357, + "velo": 33850, + "velocity": 23811, + "vels": 5109, + "velve": 37849, + "velvet": 11063, + "vely": 1708, + "vember": 3477, + "vement": 3129, + "vements": 11104, + "ven": 1240, + "ven": 1638, + "vena": 47442, + "vend": 10851, + "vending": 29202, + "vendor": 21261, + "vendors": 20353, + "vene": 5365, + "veness": 10516, + "venetian": 34336, + "venezia": 34139, + "venezu": 10939, + "venezuela": 12839, + "venezuelan": 34699, + "veng": 31526, + "venge": 27757, + "vengeance": 32057, + "veni": 31142, + "venice": 11010, + "vening": 47532, + "venison": 40037, + "venom": 42491, + "venom": 21588, + "vens": 20884, + "vent": 4373, + "vent": 5687, + "ventil": 39522, + "ventilation": 35066, + "venting": 15731, + "vention": 4122, + "vents": 12833, + "ventu": 48217, + "ventura": 20921, + "venture": 37046, + "venture": 12543, + "ventures": 20829, + "venue": 5097, + "venues": 18120, + "venus": 14691, + "ver": 624, + "ver": 667, + "vera": 13350, + "verage": 3725, + "verb": 34952, + "verbal": 26522, + "verbally": 39985, + "verbs": 45687, + "verde": 16935, + "verdi": 42306, + "verdict": 18030, + "vere": 11135, + "vere": 34707, + "vered": 2868, + "verge": 23913, + "veri": 11638, + "verification": 33521, + "verified": 22555, + "verify": 34722, + "vering": 4630, + "veriz": 19707, + "verizon": 21532, + "verma": 41261, + "vermont": 
19241, + "vern": 2214, + "vern": 12586, + "verne": 45553, + "vernon": 18348, + "vero": 45217, + "vero": 38208, + "verona": 31819, + "veronic": 39551, + "veronica": 24039, + "vers": 1219, + "vers": 2094, + "versa": 35765, + "versace": 25422, + "versail": 29857, + "versailles": 32129, + "versary": 2940, + "versatile": 18110, + "versatility": 41340, + "verse": 39466, + "verse": 3131, + "verses": 30769, + "versi": 8934, + "version": 3273, + "versions": 16190, + "versity": 1906, + "verst": 42484, + "verstappen": 45064, + "versus": 14548, + "versy": 18522, + "vert": 11742, + "verte": 35158, + "verted": 48173, + "verti": 30459, + "vertical": 14293, + "vertigo": 42477, + "verton": 40632, + "verts": 37265, + "very": 11698, + "very": 1070, + "veryday": 37944, + "verything": 45174, + "ves": 9616, + "ves": 1003, + "vesmatter": 47636, + "vespa": 46029, + "vessel": 16387, + "vessels": 22822, + "vest": 31657, + "vest": 12473, + "vesti": 40349, + "vests": 41906, + "vet": 12294, + "vet": 5951, + "veter": 4330, + "veteran": 20797, + "veteran": 8814, + "veterans": 7092, + "veteransday": 26409, + "veterin": 43959, + "veterinary": 25458, + "veto": 36570, + "vets": 13113, + "vette": 17045, + "vettel": 28700, + "vevo": 35141, + "vex": 36187, + "vex": 43978, + "vey": 34792, + "vey": 3884, + "vez": 35987, + "vez": 17226, + "vf": 25966, + "vfl": 33726, + "vfx": 30149, + "vg": 40591, + "vg": 22346, + "vh": 46953, + "vh": 23847, + "vhs": 21932, + "vi": 603, + "vi": 4259, + "via": 1048, + "viable": 25752, + "viadu": 37012, + "viaduct": 39113, + "vial": 39951, + "vian": 40487, + "vian": 16124, + "vibe": 37974, + "vibe": 12813, + "vibes": 7764, + "vibr": 9527, + "vibrant": 14270, + "vibration": 37456, + "vibrations": 43660, + "vic": 1555, + "vic": 4412, + "vica": 46168, + "vicar": 43899, + "vice": 43572, + "vice": 6931, + "vicente": 39411, + "vices": 8332, + "vich": 24143, + "vici": 46670, + "vicious": 25177, + "vick": 15116, + "vick": 29704, + "vickers": 48452, + "vicki": 34927, + "vicky": 37176, + "vicky": 25788, + "victi": 6861, + "victim": 9133, + "victims": 7131, + "victor": 2423, + "victor": 10690, + "victori": 17555, + "victoria": 39286, + "victoria": 6127, + "victorian": 12350, + "victorias": 47791, + "victories": 24577, + "victorious": 24033, + "victory": 36668, + "victory": 4127, + "vid": 17233, + "vid": 9284, + "vida": 19015, + "vidal": 36678, + "vide": 1334, + "vide": 45244, + "video": 9478, + "video": 1455, + "videogame": 35097, + "videogames": 21149, + "videos": 6081, + "vids": 23035, + "vidy": 29639, + "vidya": 45264, + "vie": 922, + "vie": 8538, + "vien": 36493, + "vienna": 12670, + "vier": 15352, + "vier": 11987, + "viera": 21114, + "viernes": 33826, + "vies": 22458, + "viest": 31979, + "viet": 17558, + "viet": 13128, + "vietnam": 19558, + "vietnam": 8623, + "vietnamese": 22382, + "view": 12004, + "view": 1093, + "viewed": 7226, + "viewer": 15061, + "viewers": 14275, + "viewing": 7124, + "viewpoint": 41604, + "views": 2758, + "vig": 8549, + "vig": 45083, + "vigil": 21538, + "vigil": 19896, + "vigilant": 43026, + "vigne": 40447, + "vigne": 34581, + "vigo": 44097, + "vigor": 26781, + "vii": 17759, + "viii": 20414, + "vijay": 12014, + "vijay": 10823, + "vijaysethu": 47966, + "vik": 10764, + "vik": 17181, + "vika": 39562, + "vikas": 37116, + "viking": 26663, + "viking": 15897, + "vikings": 11713, + "vikram": 41136, + "vikram": 24314, + "viktor": 36101, + "vil": 1338, + "vil": 3000, + "vila": 37505, + "vile": 27247, + "vill": 10481, + "vill": 45698, + "villa": 3203, + "villa": 7754, + "village": 34584, + 
"village": 4331, + "villagers": 34283, + "villages": 17621, + "villain": 15425, + "villains": 25271, + "villanova": 44025, + "villar": 35164, + "villas": 28907, + "ville": 11110, + "ville": 1930, + "villen": 46177, + "villi": 36907, + "vimeo": 48720, + "vin": 1379, + "vin": 2558, + "vina": 35682, + "vinai": 37396, + "vinaigrette": 39876, + "vinay": 43952, + "vince": 32429, + "vince": 6236, + "vincen": 33402, + "vincent": 29069, + "vincent": 10357, + "vinci": 30199, + "vind": 20275, + "vindic": 39582, + "vine": 8471, + "vine": 7721, + "vinegar": 23834, + "vines": 21268, + "vineyard": 16527, + "vineyards": 23082, + "ving": 5375, + "ving": 903, + "vingne": 42579, + "vings": 22510, + "vini": 48119, + "vinnie": 40885, + "vinny": 36794, + "vino": 14509, + "vinod": 43348, + "vins": 34820, + "vinson": 45945, + "vintag": 10936, + "vintage": 13654, + "vintage": 3266, + "viny": 40990, + "vinyl": 22835, + "vinyl": 5754, + "vio": 11913, + "vio": 20324, + "viol": 3164, + "viola": 27438, + "violate": 44875, + "violated": 38192, + "violating": 37554, + "violation": 22919, + "violations": 21969, + "violence": 5450, + "violent": 11565, + "violently": 47758, + "violet": 16118, + "violets": 42861, + "violin": 17058, + "violinist": 36299, + "vion": 35496, + "vious": 6418, + "viously": 7149, + "vip": 45714, + "vip": 7111, + "viper": 27401, + "vips": 41149, + "vir": 1790, + "vir": 25319, + "vira": 35910, + "viral": 11653, + "virat": 32473, + "virgil": 39076, + "virgin": 5651, + "virgin": 12103, + "virgini": 43426, + "virginia": 6728, + "virgo": 39978, + "viro": 32301, + "viron": 38309, + "virtu": 7977, + "virtual": 18059, + "virtual": 7790, + "virtually": 22475, + "virtualreality": 32608, + "virtue": 26860, + "virtues": 42167, + "virtuoso": 47027, + "virus": 11808, + "viruses": 34830, + "vis": 1301, + "vis": 5337, + "visa": 12802, + "visas": 41228, + "vise": 24977, + "vised": 14810, + "vish": 12024, + "vish": 29124, + "vishal": 33648, + "vishnu": 37816, + "visi": 1409, + "visibility": 15921, + "visible": 36658, + "visible": 8626, + "vising": 37439, + "vision": 11147, + "vision": 2515, + "visional": 24627, + "visionary": 22959, + "visions": 13804, + "visit": 3388, + "visit": 1600, + "visitation": 44370, + "visited": 5580, + "visiting": 4680, + "visitor": 13881, + "visitors": 9160, + "visits": 8489, + "visitscotland": 28760, + "visitspain": 48860, + "vism": 15514, + "viso": 46732, + "visor": 24217, + "vist": 21436, + "vista": 13865, + "visu": 7739, + "visual": 17004, + "visual": 7195, + "visualization": 28500, + "visualize": 45057, + "visually": 25743, + "visuals": 21315, + "viswas": 36513, + "viswasam": 47664, + "vit": 4056, + "vit": 35580, + "vita": 15700, + "vital": 32525, + "vital": 10585, + "vitality": 36385, + "vitam": 9856, + "vitamin": 13675, + "vitamins": 22582, + "vito": 36725, + "vity": 4893, + "vitz": 26188, + "vius": 41571, + "viv": 21827, + "viv": 35363, + "viva": 17399, + "vival": 35920, + "vive": 18980, + "vive": 24004, + "vivek": 36243, + "vivi": 11625, + "vivian": 30129, + "vivid": 22984, + "vivo": 28091, + "vivo": 25888, + "vix": 28976, + "vix": 34811, + "vixen": 38757, + "vixx": 32106, + "viz": 28251, + "viz": 31786, + "vj": 45439, + "vj": 30827, + "vk": 41893, + "vl": 37580, + "vl": 36442, + "vla": 23686, + "vlad": 41089, + "vladi": 19320, + "vladimir": 21702, + "vlive": 46797, + "vlog": 18894, + "vm": 16204, + "vm": 20269, + "vma": 35666, + "vmas": 30236, + "vmware": 29615, + "vn": 47098, + "vn": 25076, + "vo": 947, + "vo": 3951, + "voc": 4105, + "voc": 20855, + "vocab": 21346, + 
"vocabulary": 23804, + "vocal": 34037, + "vocal": 13147, + "vocali": 19134, + "vocalist": 22102, + "vocals": 17666, + "vocation": 20521, + "vocational": 33751, + "vod": 11820, + "vod": 35854, + "vodaf": 28436, + "vodafone": 38695, + "vodka": 13646, + "vogel": 44960, + "vogue": 24418, + "vogue": 13178, + "voic": 29185, + "voice": 13179, + "voice": 3386, + "voiced": 34352, + "voiceof": 44966, + "voiceover": 41979, + "voices": 9144, + "void": 21561, + "voip": 42762, + "voir": 16036, + "vol": 1343, + "vol": 7945, + "volatile": 41022, + "volatility": 32355, + "volcan": 9916, + "volcanic": 24072, + "volcano": 14581, + "volcanoes": 38055, + "voli": 40138, + "volk": 13432, + "volkswag": 14407, + "volkswagen": 15342, + "volley": 7130, + "volley": 34656, + "volleyball": 7458, + "volo": 44791, + "vols": 20404, + "volt": 26430, + "volta": 29879, + "volta": 33480, + "voltage": 23118, + "voltron": 39314, + "volu": 3563, + "volume": 8284, + "volumes": 22651, + "volun": 3356, + "voluntar": 48823, + "voluntary": 23815, + "volunte": 3556, + "volunteer": 32331, + "volunteer": 7114, + "volunteered": 34000, + "volunteering": 14902, + "volunteers": 5939, + "volution": 24043, + "volved": 42888, + "volvo": 39991, + "volvo": 16906, + "vom": 24198, + "vomit": 46485, + "von": 11269, + "von": 8497, + "voo": 19497, + "voodoo": 26869, + "voor": 34291, + "voor": 34464, + "vor": 8338, + "vor": 5308, + "vore": 18215, + "vortex": 30071, + "vos": 16863, + "vot": 48558, + "vote": 6830, + "vote": 2187, + "voted": 6454, + "votel": 41379, + "voter": 44474, + "voter": 14065, + "voters": 8925, + "votes": 6693, + "voting": 5756, + "vou": 11045, + "voucher": 18190, + "vouchers": 23384, + "vous": 10636, + "vow": 34787, + "vows": 21677, + "vox": 29215, + "vox": 22692, + "voy": 10622, + "voy": 15021, + "voyage": 16299, + "voyager": 29669, + "vp": 32758, + "vp": 3896, + "vpn": 38212, + "vr": 16840, + "vr": 5921, + "vre": 44500, + "vre": 17501, + "vs": 11385, + "vs": 1547, + "vsco": 26752, + "vsco": 32822, + "vscocam": 34694, + "vsky": 37791, + "vss": 31919, + "vt": 31732, + "vt": 10291, + "vu": 8664, + "vu": 13230, + "vue": 43915, + "vue": 19313, + "vuel": 31312, + "vuelta": 43856, + "vuitton": 26705, + "vul": 6856, + "vulcan": 34767, + "vulner": 11213, + "vulnerability": 28797, + "vulnerable": 14332, + "vulture": 34593, + "vultures": 47197, + "vv": 19264, + "vv": 35686, + "vw": 28650, + "vw": 13250, + "vx": 47644, + "vy": 11566, + "vy": 5157, + "w": 86, + "w": 342, + "wa": 869, + "wa": 2663, + "waa": 35874, + "wab": 19893, + "wab": 36852, + "wac": 27445, + "wac": 37947, + "wack": 22880, + "wack": 38270, + "wacky": 34318, + "waco": 36035, + "wad": 11133, + "wad": 30451, + "wada": 40006, + "wade": 40237, + "wade": 14180, + "wadi": 37253, + "waf": 17638, + "wafc": 49086, + "waff": 13940, + "waffle": 20375, + "waffles": 24205, + "wag": 5764, + "wag": 19177, + "wage": 10716, + "wager": 43430, + "wages": 19114, + "wagner": 18081, + "wagon": 13260, + "wagons": 47944, + "wags": 48580, + "wah": 24812, + "wah": 18014, + "wahl": 27500, + "wahlberg": 35151, + "wahoo": 47995, + "wai": 11469, + "wai": 21569, + "waifu": 46551, + "waikiki": 44907, + "wain": 28358, + "wain": 20120, + "wainwright": 45878, + "waist": 36946, + "waist": 18459, + "wait": 10021, + "wait": 1885, + "waite": 24272, + "waited": 18492, + "waiter": 32946, + "waitin": 44482, + "waiting": 2680, + "waitress": 39760, + "waitrose": 37164, + "waits": 21361, + "waiver": 42866, + "waj": 49367, + "wak": 11172, + "wak": 36015, + "waka": 42696, + "wake": 10501, + "wake": 5731, + 
"wakefield": 26358, + "wakes": 29108, + "wakeup": 26328, + "wakeup": 35380, + "wakeupamerica": 37474, + "waking": 13025, + "wal": 1056, + "wal": 6903, + "wala": 16468, + "walang": 49180, + "walcott": 45744, + "wald": 46930, + "wald": 15724, + "walden": 39311, + "waldo": 32440, + "waldorf": 38227, + "wale": 41247, + "wale": 20336, + "wales": 25383, + "wales": 5110, + "walgreens": 38490, + "wali": 37576, + "wali": 14768, + "walia": 44455, + "walk": 8588, + "walk": 2374, + "walkaway": 48255, + "walked": 8667, + "walker": 24735, + "walker": 6150, + "walkers": 23366, + "walkin": 45792, + "walking": 12644, + "walking": 3941, + "walkingdead": 14948, + "walkout": 47470, + "walks": 8192, + "walkway": 36614, + "wall": 4316, + "wall": 2569, + "walla": 26007, + "walla": 39982, + "wallabies": 48926, + "wallace": 12535, + "wallart": 36223, + "walled": 36567, + "waller": 45340, + "wallet": 12154, + "wallets": 38550, + "walleye": 49099, + "wallis": 42206, + "wallpaper": 10560, + "wallpapers": 29841, + "walls": 8258, + "wallstreet": 45341, + "wally": 26024, + "walmart": 11972, + "walnut": 16310, + "walnuts": 38294, + "walsall": 42935, + "walsh": 12856, + "walt": 23535, + "walt": 14312, + "waltdisneyworld": 36505, + "walter": 31156, + "walter": 10645, + "walters": 25532, + "waltham": 42742, + "waltham": 45581, + "walton": 19485, + "waltz": 35982, + "wam": 20503, + "wamy": 46970, + "wan": 2060, + "wan": 4557, + "wana": 30830, + "wand": 14636, + "wand": 28559, + "wanda": 25070, + "wander": 12985, + "wander": 24473, + "wandered": 46593, + "wanderers": 27540, + "wandering": 22597, + "wanderlust": 16129, + "wane": 27459, + "wang": 19731, + "wang": 11900, + "wani": 21674, + "wankers": 42189, + "wann": 23622, + "wanna": 35940, + "wanna": 3836, + "wannabe": 40730, + "wannaone": 44832, + "want": 18356, + "want": 1280, + "wanted": 3146, + "wanting": 12801, + "wants": 3107, + "wap": 27393, + "wap": 30368, + "waq": 47512, + "war": 984, + "war": 2238, + "wara": 21631, + "warbler": 33891, + "warcraft": 13660, + "ward": 7728, + "ward": 1460, + "warden": 27798, + "wardly": 30780, + "wardro": 14247, + "wardrobe": 15020, + "wards": 2593, + "ware": 7416, + "ware": 4476, + "wareagle": 35716, + "warehouse": 13054, + "wareness": 41601, + "wareness": 35870, + "wares": 30692, + "warfare": 15739, + "warhammer": 26832, + "warhol": 27554, + "wari": 20977, + "wark": 46346, + "wark": 15164, + "warlock": 42455, + "warm": 14725, + "warm": 3616, + "warmed": 36695, + "warmer": 14328, + "warmest": 30910, + "warming": 8606, + "warmly": 45322, + "warmongers": 33205, + "warms": 32917, + "warmth": 19636, + "warmup": 29904, + "warmups": 44094, + "warn": 19360, + "warned": 16409, + "warner": 28564, + "warner": 13402, + "warning": 4994, + "warnings": 18098, + "warns": 14086, + "waron": 38947, + "warp": 32411, + "warped": 32125, + "warran": 17392, + "warrant": 22554, + "warrants": 45677, + "warranty": 23999, + "warren": 23143, + "warren": 9234, + "warri": 4109, + "warrington": 31203, + "warrior": 18998, + "warrior": 8148, + "warriors": 6421, + "wars": 3931, + "warsaw": 21072, + "warship": 47846, + "wart": 43535, + "wart": 7346, + "wartime": 42998, + "warts": 21781, + "warwick": 23081, + "warwick": 22215, + "warwickshire": 36766, + "wary": 36213, + "was": 3398, + "was": 739, + "wasabi": 47334, + "wash": 3363, + "wash": 7810, + "washed": 14092, + "washer": 24085, + "washes": 38950, + "washing": 13029, + "washington": 16774, + "washington": 4365, + "washingtondc": 40225, + "washingtonpost": 28426, + "wasn": 5044, + "wasnt": 29607, + "wasp": 24889, + 
"wasps": 35300, + "wassup": 45708, + "wast": 28886, + "waste": 18157, + "waste": 6065, + "wasted": 18278, + "wasteland": 44035, + "wastewater": 34463, + "wasting": 25577, + "wat": 800, + "wat": 10621, + "wata": 42509, + "watch": 7046, + "watch": 1239, + "watchdog": 35303, + "watched": 5775, + "watcher": 35971, + "watchers": 28443, + "watches": 9521, + "watchin": 32432, + "watching": 2113, + "water": 2505, + "water": 1573, + "watercolor": 14211, + "watercolour": 18377, + "waterfall": 16403, + "waterfalls": 26692, + "waterford": 24448, + "waterfront": 16605, + "waterhouse": 45072, + "watering": 19871, + "waterloo": 17465, + "watermelon": 19889, + "waterproof": 17613, + "waters": 7753, + "watershed": 33204, + "waterstones": 45014, + "waterways": 37395, + "watford": 23162, + "watfordfc": 37328, + "wati": 27966, + "watkins": 22539, + "watson": 35490, + "watson": 9294, + "watt": 22899, + "watt": 15805, + "wattpad": 32351, + "watts": 14750, + "wau": 9479, + "wav": 6054, + "wave": 17530, + "wave": 4535, + "waved": 44657, + "waver": 25997, + "waves": 7882, + "waving": 26545, + "wavy": 31941, + "waw": 22039, + "wawrinka": 48414, + "wawx": 47387, + "wax": 18789, + "wax": 11910, + "waxing": 38781, + "way": 3079, + "way": 923, + "wayback": 47822, + "wayne": 23632, + "wayne": 7003, + "ways": 1248, + "waz": 20889, + "waz": 48835, + "wb": 10726, + "wb": 12377, + "wba": 22675, + "wbb": 14482, + "wbc": 26745, + "wbo": 49053, + "wbz": 35471, + "wc": 4842, + "wc": 5755, + "wcc": 47166, + "wcc": 34926, + "wcpo": 46624, + "wcs": 39916, + "wcvb": 32709, + "wcw": 9041, + "wd": 15998, + "wd": 7494, + "wdw": 40334, + "we": 598, + "we": 649, + "wea": 37146, + "wea": 47301, + "weak": 12128, + "weak": 10128, + "weaker": 39735, + "weakness": 21448, + "weaknesses": 43487, + "weal": 14759, + "wealth": 33150, + "wealth": 7904, + "wealthy": 22617, + "weap": 6156, + "weapon": 42612, + "weapon": 10537, + "weapons": 10007, + "wear": 12206, + "wear": 2839, + "wearab": 22983, + "wearable": 44943, + "wearable": 24973, + "wearables": 30319, + "weare": 4264, + "weare": 27867, + "weareall": 45980, + "wearec": 43620, + "wearen": 45635, + "weareone": 16149, + "weareoneexo": 16448, + "wearethe": 40242, + "wearing": 3309, + "wears": 11869, + "weary": 38766, + "weasel": 44308, + "weather": 8808, + "weather": 2237, + "weathercee": 44980, + "weatherchannel": 42138, + "weav": 22260, + "weave": 22450, + "weaver": 20297, + "weaving": 27131, + "web": 2055, + "web": 4601, + "webb": 15708, + "webber": 34248, + "webcam": 24211, + "webcam": 22589, + "webcamtoy": 27719, + "webcast": 28256, + "webcomic": 34286, + "webcomics": 39811, + "webdesign": 20470, + "webdev": 37000, + "webdevelopment": 47553, + "weber": 20179, + "webin": 8460, + "webinar": 8921, + "webinars": 47755, + "webpage": 46964, + "webs": 32829, + "webseries": 44819, + "website": 3364, + "websites": 19278, + "webster": 19471, + "websummit": 48069, + "wec": 33152, + "wechat": 46124, + "wed": 1687, + "wed": 3478, + "wedd": 7576, + "wedding": 11204, + "wedding": 3101, + "weddings": 15964, + "wedge": 21446, + "wedges": 33179, + "wedne": 2380, + "wednesday": 9311, + "wednesday": 2689, + "wednesdaymotivation": 37860, + "wednesdays": 24943, + "wednesdaywisdom": 11445, + "wedo": 43432, + "weds": 19107, + "wee": 716, + "wee": 8288, + "weed": 36935, + "weed": 8015, + "weeds": 26326, + "week": 1286, + "week": 994, + "weekday": 29244, + "weekdays": 44330, + "weekend": 17205, + "weekend": 1456, + "weekender": 36547, + "weekends": 14564, + "weekly": 34652, + "weekly": 5885, + "weeknd": 29925, + 
"weeks": 2898, + "weeksary": 24628, + "ween": 17517, + "ween": 1599, + "weep": 39270, + "weeping": 36629, + "weer": 32491, + "weet": 17742, + "weets": 13454, + "wef": 23313, + "weg": 47867, + "weg": 47561, + "wego": 44784, + "wego": 28220, + "weh": 48458, + "weh": 40313, + "weho": 47798, + "wei": 6958, + "wei": 20952, + "weibo": 20613, + "weigh": 10565, + "weigh": 17346, + "weighed": 33210, + "weighing": 24455, + "weighs": 20481, + "weight": 12723, + "weight": 3868, + "weighted": 43179, + "weightlifting": 36164, + "weightloss": 20359, + "weights": 21374, + "weil": 43720, + "weiler": 42203, + "wein": 29134, + "wein": 37684, + "weiner": 38822, + "weinstein": 34367, + "weir": 11299, + "weir": 25517, + "weird": 27981, + "weird": 5613, + "weirdest": 29482, + "weirdo": 32476, + "weis": 26251, + "weiser": 34833, + "weiss": 24794, + "wel": 1267, + "wel": 8042, + "welch": 25820, + "welcom": 11578, + "welcome": 18318, + "welcome": 1881, + "welcomed": 12590, + "welcomes": 9304, + "welcometo": 47511, + "welcoming": 8775, + "weld": 39776, + "welding": 24956, + "welfare": 12129, + "well": 3277, + "well": 1123, + "wellbeing": 14273, + "weller": 40921, + "welling": 49165, + "wellington": 15389, + "wellness": 40574, + "wellness": 9904, + "wells": 42705, + "wells": 9804, + "welove": 13573, + "welp": 28391, + "wels": 20852, + "welsh": 19173, + "welsh": 10977, + "welt": 38595, + "welter": 37115, + "welterweight": 39617, + "wemb": 15213, + "wembley": 16579, + "wen": 6590, + "wen": 11278, + "wend": 15166, + "wendell": 42091, + "wendy": 31616, + "wendy": 14074, + "wenger": 21105, + "went": 18633, + "went": 2437, + "wentworth": 36423, + "wentz": 39179, + "wer": 6316, + "wer": 2980, + "were": 15461, + "were": 1365, + "wered": 6605, + "weren": 13611, + "werewolf": 32001, + "werk": 30176, + "werner": 29917, + "wers": 7110, + "wes": 18620, + "wes": 14738, + "wesle": 29606, + "wesley": 17332, + "wesleyan": 32509, + "wesome": 33292, + "wess": 44431, + "west": 2973, + "west": 1593, + "westbound": 29208, + "westbrook": 26948, + "westchester": 36675, + "westcoast": 44610, + "westend": 44815, + "wester": 9846, + "western": 17079, + "western": 4463, + "westfield": 32309, + "westh": 36798, + "westin": 43232, + "westlake": 41535, + "westminster": 15158, + "weston": 22771, + "westside": 33762, + "westwood": 26371, + "westworld": 42287, + "wet": 12406, + "wet": 6682, + "weta": 40946, + "wethenorth": 45281, + "wethepeople": 48030, + "wether": 33794, + "wether": 48405, + "wetland": 37357, + "wetlands": 26547, + "wett": 41971, + "wetter": 43957, + "wewant": 39280, + "wewill": 37241, + "wex": 17234, + "wexford": 29876, + "wexmondays": 49042, + "wey": 30376, + "wey": 19781, + "weymouth": 41433, + "wf": 14576, + "wf": 22313, + "wfa": 44606, + "wfc": 36431, + "wfp": 35193, + "wftv": 47075, + "wg": 21091, + "wg": 25857, + "wga": 32354, + "wgn": 48828, + "wh": 573, + "wh": 13844, + "wha": 18994, + "wha": 25884, + "whal": 38967, + "whale": 37083, + "whale": 11650, + "whales": 17722, + "wham": 42506, + "whar": 15517, + "wharf": 22452, + "wharton": 43320, + "what": 4268, + "what": 768, + "whatcha": 37160, + "whate": 6695, + "whatever": 6743, + "whati": 23500, + "whats": 9263, + "whats": 13084, + "whatsapp": 10119, + "whatsoever": 39928, + "whatson": 35632, + "whatyou": 30508, + "whe": 2009, + "whead": 34583, + "wheat": 20505, + "wheat": 10303, + "wheaton": 46933, + "wheel": 7360, + "wheel": 6744, + "wheelchair": 17713, + "wheeler": 18405, + "wheeling": 34839, + "wheels": 8025, + "whel": 9792, + "whelan": 40715, + "when": 8753, + "when": 
827, + "whenever": 10500, + "where": 7052, + "where": 1234, + "whereabouts": 47808, + "whereas": 42234, + "wheres": 46345, + "wherever": 14103, + "whereyou": 46837, + "whether": 5903, + "whew": 39016, + "whey": 34556, + "whi": 4295, + "whi": 33129, + "which": 1448, + "whiche": 48719, + "whichever": 49138, + "whil": 8499, + "while": 1519, + "whilst": 8596, + "whim": 27766, + "whimsical": 42282, + "whip": 14412, + "whipped": 22323, + "whipping": 41567, + "whir": 20873, + "whirl": 30962, + "whirlwind": 47771, + "whis": 6024, + "whiskey": 41381, + "whiskey": 11610, + "whisky": 37567, + "whisky": 12599, + "whisp": 21986, + "whispe": 30356, + "whisper": 27616, + "whisperer": 41368, + "whispering": 42599, + "whispers": 29133, + "whist": 13640, + "whistle": 23972, + "whistle": 19746, + "whistleblower": 40410, + "whistler": 29633, + "whit": 4398, + "whit": 31498, + "whitaker": 35851, + "whitby": 30858, + "white": 4699, + "white": 1579, + "whiteboard": 40839, + "whitec": 24575, + "whitehall": 42827, + "whitehead": 43560, + "whitehouse": 20776, + "whitening": 35540, + "whitepaper": 42713, + "whites": 35886, + "whites": 18835, + "whitesox": 28816, + "whitewater": 49350, + "whitfield": 48404, + "whitley": 40564, + "whitman": 32394, + "whitney": 43021, + "whitney": 18048, + "whitt": 33784, + "whittaker": 47595, + "whl": 25801, + "who": 2969, + "who": 822, + "whoa": 16943, + "whoever": 11137, + "whois": 41884, + "whole": 10360, + "whole": 2954, + "wholefoods": 42840, + "wholesale": 18306, + "wholesome": 35959, + "whom": 38158, + "whom": 12873, + "whoo": 20003, + "whoo": 49290, + "whoop": 22060, + "whoops": 28433, + "whopping": 34384, + "whore": 31690, + "whos": 41460, + "whos": 27130, + "whose": 6933, + "whouse": 45927, + "whs": 26292, + "wht": 32470, + "whufc": 31695, + "whun": 18272, + "why": 11040, + "why": 1182, + "whyte": 42386, + "wi": 820, + "wi": 5585, + "wib": 45303, + "wic": 7834, + "wich": 9759, + "wich": 5238, + "wichita": 22566, + "wick": 6798, + "wick": 6479, + "wicked": 32579, + "wicked": 12825, + "wicker": 38096, + "wicket": 19180, + "wickets": 22110, + "wicklow": 39039, + "wicz": 30121, + "wid": 11886, + "wid": 20886, + "wide": 19341, + "wide": 3184, + "widely": 16195, + "widening": 46598, + "wider": 21263, + "widesp": 20598, + "widespread": 21258, + "widget": 43906, + "wido": 28068, + "widow": 19949, + "widows": 42129, + "width": 23571, + "wie": 21378, + "wie": 9131, + "wielding": 47272, + "wien": 38131, + "wiener": 40567, + "wies": 42788, + "wif": 37572, + "wife": 3607, + "wifey": 35282, + "wifi": 11026, + "wig": 23690, + "wig": 12216, + "wigan": 23130, + "wiggins": 32329, + "wiggle": 47812, + "wight": 41278, + "wight": 15545, + "wigs": 31207, + "wii": 8005, + "wiiu": 40980, + "wiki": 10373, + "wiki": 24265, + "wikileaks": 28731, + "wikipedia": 15176, + "wil": 1352, + "wil": 20581, + "wilbur": 43069, + "wilcox": 43231, + "wild": 2780, + "wild": 3220, + "wildatlantic": 35500, + "wildatlanticway": 35776, + "wildcard": 37360, + "wildcat": 49077, + "wildcat": 25870, + "wildcats": 15909, + "wilde": 23498, + "wilder": 14343, + "wilder": 23499, + "wilderness": 16506, + "wildest": 43028, + "wildfire": 22788, + "wildfires": 29184, + "wildflower": 27628, + "wildflower": 33181, + "wildflowerhour": 31302, + "wildflowers": 29136, + "wildlife": 13298, + "wildlife": 5250, + "wildlifephotography": 32307, + "wildlifewednesday": 48537, + "wildly": 35981, + "wildoz": 40113, + "wiley": 32747, + "wilhelm": 39696, + "wilkes": 39548, + "wilkins": 36986, + "wilkinson": 26797, + "will": 5062, + "will": 751, + 
"willam": 43276, + "willard": 44920, + "wille": 48739, + "willem": 38044, + "willi": 2256, + "william": 8420, + "william": 4705, + "williams": 38452, + "williams": 4075, + "williamsburg": 30683, + "williamson": 20793, + "willie": 13907, + "willing": 34160, + "willing": 11718, + "willingness": 40573, + "willis": 18491, + "willow": 33887, + "willow": 15665, + "wills": 26913, + "willy": 34502, + "willy": 19599, + "wilmington": 28052, + "wilms": 47879, + "wilshere": 48359, + "wilson": 23629, + "wilson": 5622, + "wilt": 23394, + "wilt": 47357, + "wilton": 46638, + "wiltshire": 28025, + "wim": 8662, + "wim": 27580, + "wimble": 11752, + "wimbledon": 12229, + "win": 831, + "win": 1225, + "winchester": 20647, + "wind": 6812, + "wind": 3630, + "winder": 44454, + "winder": 46245, + "winding": 22390, + "windmill": 34084, + "windo": 3110, + "window": 26675, + "window": 4879, + "windows": 5437, + "winds": 12668, + "winds": 7012, + "windshield": 33002, + "windsor": 44322, + "windsor": 12884, + "windy": 13446, + "wine": 7375, + "wine": 2604, + "winelover": 26357, + "winemaker": 41588, + "wineoclock": 43846, + "wineries": 49349, + "winery": 15500, + "wines": 8263, + "winetasting": 41288, + "winewednesday": 35447, + "wing": 8141, + "wing": 1340, + "winged": 24993, + "winger": 22727, + "winget": 44578, + "wings": 5178, + "wink": 34455, + "wink": 25859, + "winkle": 36430, + "winn": 38104, + "winne": 46273, + "winner": 32961, + "winner": 2520, + "winners": 4320, + "winni": 13018, + "winnie": 29022, + "winning": 42099, + "winning": 2577, + "winnings": 46490, + "winnipeg": 14369, + "winona": 49202, + "wins": 46839, + "wins": 2718, + "winslow": 39658, + "winston": 14848, + "winter": 7340, + "winter": 2541, + "winters": 21587, + "wintry": 39504, + "wip": 10447, + "wipe": 26761, + "wiped": 31822, + "wipes": 33463, + "wir": 16849, + "wir": 44838, + "wire": 7558, + "wire": 7794, + "wired": 18935, + "wireless": 9103, + "wires": 24311, + "wiring": 36434, + "wirral": 34675, + "wis": 3392, + "wis": 20405, + "wiscon": 9857, + "wisconsin": 10265, + "wisdom": 42474, + "wisdom": 5425, + "wise": 19116, + "wise": 5558, + "wisely": 26173, + "wiser": 44859, + "wish": 11328, + "wish": 2412, + "wished": 25883, + "wishes": 6045, + "wishing": 5307, + "wishlist": 31969, + "wit": 584, + "wit": 8531, + "witch": 20139, + "witch": 10083, + "witchcraft": 35065, + "witcher": 33684, + "witches": 21673, + "with": 1435, + "with": 593, + "withdra": 24696, + "withdraw": 31670, + "withdrawal": 25765, + "withdrawn": 46687, + "withdraws": 48637, + "wither": 39655, + "witherspoon": 45409, + "within": 4154, + "withme": 44670, + "without": 32836, + "without": 2193, + "withstand": 42236, + "withthe": 36872, + "withus": 30572, + "withyou": 30351, + "witne": 12096, + "witness": 8793, + "witnessed": 20187, + "witnesses": 22778, + "witnessing": 33618, + "wits": 30938, + "witt": 38194, + "witt": 17168, + "witter": 31597, + "witty": 29970, + "witz": 44186, + "witz": 13265, + "wiv": 48925, + "wives": 14378, + "wiwx": 44461, + "wiz": 7730, + "wiz": 23178, + "wizar": 49121, + "wizard": 30490, + "wizard": 14295, + "wizards": 19140, + "wizkid": 40146, + "wj": 19739, + "wj": 35453, + "wk": 11512, + "wk": 11528, + "wkend": 42336, + "wknd": 20851, + "wks": 25508, + "wku": 43377, + "wl": 13299, + "wl": 9613, + "wm": 20268, + "wm": 15790, + "wn": 1186, + "wn": 757, + "wnba": 32358, + "wned": 8628, + "wns": 12950, + "wnt": 22484, + "wny": 24833, + "wo": 1613, + "wo": 11132, + "woah": 17751, + "wob": 35984, + "woc": 39011, + "wod": 41522, + "woes": 27860, + "wof": 
45671, + "woj": 48931, + "wok": 28912, + "woke": 9331, + "woken": 43697, + "woking": 43931, + "wol": 2798, + "wol": 48622, + "wold": 42399, + "wolf": 9453, + "wolf": 5916, + "wolfe": 24989, + "wolff": 34369, + "wolfgang": 34061, + "wolfpack": 30887, + "wolve": 45101, + "wolver": 14334, + "wolverhampton": 34518, + "wolverine": 23353, + "wolverines": 42003, + "wolves": 9372, + "wom": 1087, + "womack": 48980, + "woman": 15716, + "woman": 2308, + "womanc": 35630, + "womancrush": 37721, + "womancrushwednesday": 39714, + "womanin": 30562, + "womaninbiz": 36482, + "womb": 37023, + "women": 3648, + "women": 1507, + "womenin": 13062, + "womeninscience": 41343, + "womeninstem": 29380, + "womenintech": 31470, + "womenof": 48421, + "womens": 12822, + "womens": 14408, + "womensart": 38548, + "womensday": 13956, + "womenshi": 22887, + "womenshistorymonth": 24982, + "womensmarch": 30102, + "won": 1528, + "won": 1749, + "wonder": 2070, + "wonder": 3936, + "wondercon": 46944, + "wondered": 15550, + "wonderful": 2582, + "wonderfully": 23245, + "wondering": 8360, + "wonderland": 13874, + "wonders": 14048, + "wonderwoman": 31000, + "wondo": 38402, + "wondr": 46771, + "wong": 17876, + "wonka": 43463, + "wont": 43174, + "wont": 15952, + "woo": 1867, + "woo": 9322, + "wood": 3269, + "wood": 1704, + "woodbridge": 49074, + "wooden": 48226, + "wooden": 9057, + "woodland": 44314, + "woodland": 17447, + "woodlands": 32430, + "woodley": 40566, + "woodpecker": 32684, + "woods": 6267, + "woodson": 48967, + "woodstock": 29486, + "woodward": 27419, + "woodwork": 47386, + "woodworking": 29267, + "woody": 38627, + "woody": 17144, + "woof": 34234, + "woof": 24028, + "woohoo": 20172, + "wook": 29192, + "wool": 9967, + "wool": 13283, + "woolf": 43728, + "woolly": 47722, + "woon": 33126, + "wooo": 43217, + "woop": 31884, + "woot": 22466, + "wor": 641, + "worcester": 22172, + "worcester": 19580, + "worcestershire": 38440, + "worcestershirehour": 43644, + "word": 8272, + "word": 2653, + "wordof": 33500, + "wordoftheday": 43594, + "wordpress": 15193, + "words": 31007, + "words": 2709, + "wore": 8953, + "work": 1636, + "work": 951, + "workday": 29735, + "worked": 5410, + "worker": 8098, + "workers": 4795, + "workflow": 28502, + "workforce": 14672, + "workin": 31825, + "workin": 26323, + "working": 20806, + "working": 1699, + "workinprogress": 46086, + "workout": 6773, + "workouts": 22779, + "workplace": 11959, + "workplaces": 47383, + "works": 2322, + "workshop": 3832, + "workshops": 12262, + "workspace": 34470, + "worl": 5221, + "world": 2334, + "world": 1002, + "worlda": 46627, + "worldbank": 36759, + "worldbookday": 31191, + "worldcup": 42525, + "worldcup": 8650, + "worlden": 44668, + "worldenviron": 47115, + "worldenvironmentday": 47522, + "worldly": 36268, + "worldo": 41698, + "worldof": 22636, + "worldre": 33951, + "worlds": 7691, + "worldseries": 26695, + "worldtour": 23202, + "worldwater": 41176, + "worldwaterday": 44520, + "worldwide": 6214, + "worm": 33709, + "worm": 10945, + "worms": 20231, + "worn": 9037, + "worried": 11911, + "worries": 17684, + "worry": 7534, + "worrying": 24058, + "worse": 8236, + "worsen": 46344, + "worshi": 31840, + "worship": 46399, + "worship": 9023, + "worst": 5719, + "wort": 30209, + "worth": 10671, + "worth": 2450, + "worthing": 39929, + "worthit": 40830, + "worthless": 44736, + "worths": 44633, + "worthwhile": 36295, + "worthy": 8881, + "worx": 44973, + "wot": 24863, + "wou": 5279, + "would": 39873, + "would": 1311, + "wouldn": 5878, + "wouldnt": 41595, + "wound": 19231, + "wounded": 14859, + 
"wounds": 21290, + "woven": 19830, + "wow": 22191, + "wow": 2781, + "woz": 44558, + "wozni": 47782, + "wp": 15378, + "wp": 13302, + "wpg": 35048, + "wps": 33386, + "wq": 45195, + "wr": 1189, + "wr": 8028, + "wra": 3852, + "wra": 46004, + "wral": 49050, + "wrangler": 30923, + "wrap": 7094, + "wrapped": 9875, + "wrapping": 15223, + "wraps": 18236, + "wrath": 29783, + "wray": 48943, + "wrc": 16004, + "wre": 3168, + "wreath": 23091, + "wrec": 20879, + "wreck": 28775, + "wreck": 15017, + "wrecked": 32695, + "wreckem": 45676, + "wrecking": 36956, + "wrecks": 45545, + "wren": 20191, + "wren": 31970, + "wrench": 30980, + "wrest": 4177, + "wrestle": 17097, + "wrestle": 28086, + "wrestlemania": 18849, + "wrestler": 19790, + "wrestlers": 25902, + "wrestling": 31292, + "wrestling": 5904, + "wrexham": 34479, + "wri": 7667, + "wri": 42007, + "wright": 28616, + "wright": 6991, + "wrights": 43711, + "wrigley": 33538, + "wrink": 22201, + "wrinkle": 46642, + "wrinkles": 35525, + "wrist": 19243, + "wrist": 16139, + "wristband": 36890, + "wristbands": 44864, + "writ": 2902, + "write": 28874, + "write": 4946, + "writer": 27886, + "writer": 4422, + "writers": 18742, + "writers": 7307, + "writerslife": 25007, + "writes": 8023, + "writing": 16053, + "writing": 2979, + "writingcommunity": 39178, + "writings": 36259, + "written": 5231, + "wro": 5447, + "wrong": 18381, + "wrong": 3669, + "wrongly": 45642, + "wrote": 5796, + "wrought": 48125, + "wrs": 45280, + "ws": 6300, + "ws": 799, + "wsb": 30681, + "wsbtv": 38394, + "wsj": 19764, + "wski": 12548, + "wsl": 43706, + "wsoc": 40253, + "wson": 33954, + "wsop": 41231, + "wsu": 44674, + "wsu": 32913, + "wsw": 43285, + "wt": 15873, + "wt": 12255, + "wta": 25984, + "wtc": 39718, + "wtf": 6891, + "wth": 23021, + "wthr": 45269, + "wti": 47345, + "wto": 36406, + "wts": 32159, + "wu": 9710, + "wu": 9837, + "wud": 43870, + "wul": 35154, + "wunder": 36661, + "wur": 24040, + "wurst": 44409, + "wusa": 40021, + "wut": 28590, + "wv": 18920, + "wv": 14743, + "wvu": 44878, + "wvu": 25879, + "ww": 3181, + "ww": 4491, + "wwc": 26505, + "wwdc": 47441, + "wwe": 12112, + "wwe": 5290, + "wwen": 23308, + "wwenetwork": 37228, + "wwenxt": 39898, + "wwer": 32038, + "wwf": 23332, + "wwfc": 42681, + "wwg": 35322, + "wwi": 20194, + "wwii": 10261, + "www": 26074, + "www": 9667, + "wwwbigbaldhead": 30761, + "wwww": 34224, + "wwww": 25200, + "wwwww": 48268, + "wwx": 47431, + "wx": 18192, + "wx": 3561, + "wy": 4665, + "wy": 7625, + "wyatt": 21660, + "wyd": 33113, + "wye": 48436, + "wye": 43751, + "wylie": 49330, + "wyn": 11802, + "wyn": 17504, + "wynn": 36117, + "wynne": 35951, + "wynonna": 41456, + "wynonnaearp": 43755, + "wyoming": 18693, + "x": 87, + "x": 343, + "xa": 24831, + "xan": 45530, + "xander": 45601, + "xavi": 36342, + "xavier": 41044, + "xavier": 18567, + "xb": 33678, + "xbox": 18063, + "xbox": 7748, + "xboxone": 27410, + "xc": 12515, + "xchange": 49132, + "xd": 6380, + "xe": 42886, + "xe": 19183, + "xen": 15568, + "xer": 49005, + "xf": 35274, + "xfactor": 25211, + "xfinity": 35107, + "xford": 34732, + "xh": 45771, + "xham": 25284, + "xi": 2467, + "xi": 7376, + "xia": 19854, + "xia": 20724, + "xian": 42570, + "xiao": 49318, + "xiaomi": 27477, + "xico": 38469, + "xide": 17398, + "xie": 40122, + "xie": 15976, + "xii": 36525, + "xiii": 28199, + "xim": 11217, + "xin": 27053, + "xin": 41517, + "xing": 14383, + "xion": 24164, + "xis": 35793, + "xit": 5316, + "xiumin": 36563, + "xiv": 16125, + "xj": 42453, + "xl": 36529, + "xl": 8833, + "xley": 38223, + "xm": 18626, + "xma": 48805, + "xmas": 
48848, + "xmas": 6425, + "xmen": 28708, + "xn": 25388, + "xo": 26936, + "xo": 9000, + "xon": 29186, + "xon": 8482, + "xox": 11531, + "xox": 34050, + "xoxo": 13313, + "xp": 15651, + "xper": 32200, + "xperia": 37615, + "xpo": 44377, + "xpress": 31809, + "xq": 40606, + "xr": 26276, + "xrp": 26965, + "xs": 16397, + "xt": 1052, + "xtina": 45520, + "xton": 32666, + "xton": 10597, + "xtra": 26969, + "xtre": 27025, + "xtreme": 33483, + "xu": 42063, + "xu": 37198, + "xv": 17768, + "xvi": 44031, + "xx": 5675, + "xx": 3553, + "xxl": 29777, + "xxx": 33923, + "xxx": 8352, + "xxxx": 32035, + "xxxx": 22819, + "xxxxx": 44195, + "xy": 20023, + "xy": 11443, + "y": 88, + "y": 344, + "ya": 5018, + "ya": 1430, + "yaa": 48847, + "yaa": 34498, + "yaan": 34680, + "yab": 27737, + "yach": 9039, + "yacht": 43806, + "yacht": 12859, + "yachts": 29260, + "yad": 13276, + "yad": 40047, + "yadav": 26650, + "yaf": 38019, + "yag": 35081, + "yah": 16170, + "yah": 12381, + "yaho": 37929, + "yahoo": 38152, + "yahoo": 16846, + "yak": 11014, + "yak": 29074, + "yaki": 44677, + "yaku": 29572, + "yakuza": 42628, + "yal": 16198, + "yal": 13418, + "yale": 39926, + "yale": 17157, + "yall": 9210, + "yam": 6666, + "yam": 19318, + "yama": 23512, + "yamaha": 18854, + "yan": 3949, + "yan": 4788, + "yana": 18698, + "yand": 38609, + "yang": 23818, + "yang": 12605, + "yani": 26439, + "yankee": 21554, + "yankees": 11889, + "yann": 40246, + "yann": 38657, + "yao": 45231, + "yap": 48700, + "yap": 34468, + "yar": 6786, + "yar": 23071, + "yard": 20234, + "yard": 4313, + "yards": 7550, + "yarmouth": 45941, + "yarn": 19702, + "yarra": 46824, + "yas": 8168, + "yas": 20570, + "yash": 30216, + "yash": 37836, + "yasi": 37700, + "yasss": 23873, + "yat": 29443, + "yat": 34965, + "yates": 27677, + "yatra": 38932, + "yav": 41275, + "yaw": 31989, + "yawn": 48643, + "yay": 20614, + "yay": 6712, + "yaya": 37608, + "yaz": 19348, + "yaz": 42252, + "yb": 41785, + "yb": 27615, + "yc": 11931, + "ycle": 38089, + "yd": 29896, + "yd": 9534, + "yday": 15899, + "yds": 24819, + "ye": 693, + "ye": 4582, + "yea": 13687, + "yeah": 29405, + "yeah": 3908, + "year": 5163, + "year": 935, + "yearbook": 21636, + "yearling": 48392, + "yearly": 24541, + "yearof": 31944, + "yearofthe": 47899, + "years": 30864, + "years": 1151, + "yearsof": 14932, + "yearswith": 45249, + "yeast": 25819, + "yeats": 44903, + "yed": 28137, + "yed": 3301, + "yee": 18114, + "yee": 23108, + "yeezy": 24901, + "yeg": 16854, + "yeg": 11976, + "yegfood": 48711, + "yeh": 21331, + "yel": 3323, + "yel": 48164, + "yell": 30824, + "yelled": 39199, + "yelling": 26581, + "yellow": 12059, + "yellow": 4481, + "yellowstone": 29241, + "yelp": 31674, + "yemen": 29276, + "yemen": 12513, + "yemeni": 44656, + "yemi": 42267, + "yen": 29602, + "yen": 17960, + "yeo": 32292, + "yeo": 43830, + "yeol": 15808, + "yeon": 16602, + "yep": 10964, + "yer": 15491, + "yer": 2371, + "yers": 3722, + "yes": 21620, + "yes": 1958, + "yess": 42778, + "yess": 40189, + "yesss": 36210, + "yessss": 45620, + "yester": 1905, + "yesterday": 1926, + "yesterdays": 36238, + "yesung": 38527, + "yet": 2296, + "yeti": 34228, + "yev": 39855, + "yew": 34660, + "yey": 45447, + "yg": 16396, + "ygk": 44758, + "ygo": 46166, + "yh": 41978, + "yi": 5826, + "yi": 14762, + "yield": 16825, + "yields": 24856, + "yikes": 25094, + "yin": 26476, + "yin": 23543, + "ying": 42933, + "ying": 910, + "yixing": 32120, + "yk": 30965, + "yl": 2656, + "yl": 4045, + "ylan": 41875, + "ylde": 42850, + "yle": 32305, + "yle": 10770, + "ylene": 34239, + "yler": 48081, + "yles": 42860, + 
"ylon": 22375, + "ylor": 48468, + "ym": 1786, + "ym": 19587, + "yman": 29077, + "ymc": 47101, + "ymca": 22369, + "yment": 8199, + "ymes": 39968, + "ymi": 5271, + "ymm": 37133, + "ymoun": 41426, + "ymouth": 36429, + "yn": 2823, + "yn": 4100, + "yne": 18238, + "ynes": 18020, + "ynn": 10499, + "ynna": 48292, + "ynwa": 27372, + "yo": 586, + "yo": 3497, + "yoda": 31922, + "yof": 5966, + "yofficial": 21818, + "yofthe": 43983, + "yog": 34985, + "yog": 36539, + "yoga": 25872, + "yoga": 5523, + "yogh": 32626, + "yoghurt": 33491, + "yogi": 22766, + "yogur": 16137, + "yogurt": 16819, + "yoh": 48880, + "yoke": 41969, + "yoko": 25929, + "yoko": 32256, + "yokohama": 42409, + "yol": 19387, + "yol": 35218, + "yolanda": 43845, + "yolo": 20905, + "yom": 34718, + "yom": 44527, + "yon": 10147, + "yon": 7604, + "yong": 27960, + "yong": 20887, + "yonge": 48592, + "yoo": 25842, + "yoo": 20775, + "yoon": 30863, + "yoon": 22113, + "yoona": 32736, + "yoongi": 24037, + "yor": 2028, + "yor": 21132, + "york": 5318, + "york": 2705, + "yorker": 23865, + "yorkers": 41041, + "yorks": 39093, + "yorkshi": 43367, + "yorkshire": 27007, + "yorkshire": 8633, + "yoruba": 46083, + "yos": 35607, + "yosemite": 25893, + "yoshi": 22920, + "yoshi": 25354, + "yot": 22875, + "yotes": 46157, + "yotpo": 26113, + "you": 1562, + "you": 592, + "youare": 33879, + "youcan": 32498, + "youknow": 47919, + "youknow": 41088, + "youn": 1596, + "young": 6939, + "young": 1888, + "younger": 10414, + "youngest": 12316, + "youngjae": 46426, + "youngster": 35881, + "youngsters": 28098, + "younow": 33831, + "your": 2130, + "your": 695, + "youre": 28344, + "youre": 19695, + "yourown": 28583, + "yours": 3834, + "yourself": 3053, + "yourselves": 19747, + "youth": 10743, + "youth": 3281, + "youthful": 37480, + "youths": 23614, + "youts": 22737, + "youtu": 13868, + "youtube": 31258, + "youtube": 3895, + "youtuber": 24720, + "youtubers": 36822, + "youu": 35055, + "youuu": 35324, + "youuuu": 47123, + "yoy": 41865, + "yp": 38370, + "yp": 34734, + "ypg": 37386, + "yql": 46122, + "yqr": 36881, + "yr": 18395, + "yr": 4333, + "yrs": 4822, + "ys": 1971, + "ys": 961, + "yser": 33121, + "ysis": 4843, + "ysl": 45681, + "ysm": 23842, + "yst": 40528, + "yt": 36777, + "yt": 14779, + "ytd": 47524, + "yte": 48172, + "yu": 3371, + "yu": 8887, + "yuan": 26236, + "yuck": 48282, + "yugo": 48231, + "yuh": 42547, + "yui": 47932, + "yuk": 17037, + "yuk": 24063, + "yuki": 34010, + "yukon": 27094, + "yul": 39832, + "yum": 6869, + "yum": 7259, + "yuma": 47566, + "yummy": 7687, + "yun": 14976, + "yun": 18288, + "yung": 44545, + "yung": 17676, + "yunho": 39748, + "yup": 13231, + "yur": 42533, + "yuri": 23823, + "yusuf": 33222, + "yuv": 36784, + "yves": 33698, + "yvon": 23327, + "yvonne": 32583, + "yvr": 29058, + "yw": 33741, + "yx": 35624, + "yxe": 34240, + "yy": 3433, + "yy": 8321, + "yya": 37444, + "yyc": 27542, + "yyc": 11741, + "yyj": 26203, + "yyy": 11514, + "yyyy": 38749, + "yyyy": 16955, + "yyyyy": 26089, + "yyyyyy": 47055, + "yz": 37579, + "yz": 46451, + "yü": 48232, + "z": 89, + "z": 345, + "za": 3710, + "za": 2186, + "zab": 22982, + "zable": 37002, + "zac": 25501, + "zac": 19159, + "zach": 13401, + "zach": 11815, + "zachary": 32401, + "zack": 30567, + "zack": 19120, + "zad": 47314, + "zad": 27838, + "zada": 34889, + "zaf": 21837, + "zafar": 46668, + "zag": 26091, + "zag": 29346, + "zagre": 34107, + "zagreb": 35355, + "zah": 23258, + "zah": 43297, + "zaha": 44408, + "zai": 44329, + "zai": 27065, + "zain": 34400, + "zain": 45366, + "zak": 13050, + "zak": 20738, + "zaki": 48091, + 
"zal": 20552, + "zal": 33298, + "zam": 7218, + "zam": 41578, + "zambia": 21671, + "zan": 7284, + "zan": 17835, + "zana": 39643, + "zand": 37712, + "zane": 34786, + "zani": 45373, + "zania": 15059, + "zano": 27637, + "zanzi": 47835, + "zap": 24134, + "zapp": 33504, + "zappa": 46592, + "zar": 5458, + "zar": 16392, + "zara": 24454, + "zardari": 20174, + "zas": 48261, + "zation": 3683, + "zawa": 49281, + "zay": 7102, + "zayed": 36726, + "zayn": 22292, + "zayn": 10308, + "zaynmalik": 25278, + "zazzle": 47857, + "ze": 2254, + "ze": 1298, + "zeal": 44951, + "zealand": 7618, + "zeb": 46518, + "zebra": 47394, + "zebra": 22548, + "zed": 21047, + "zed": 1993, + "zedd": 45608, + "zee": 25468, + "zee": 14080, + "zeiss": 47460, + "zeit": 37898, + "zeit": 37906, + "zek": 40829, + "zeke": 47065, + "zel": 10389, + "zel": 12027, + "zelda": 17138, + "zell": 39526, + "zen": 8518, + "zen": 3928, + "zend": 33478, + "zendaya": 35956, + "zenith": 44740, + "zens": 15298, + "zeph": 40726, + "zepp": 22977, + "zeppelin": 25408, + "zer": 6118, + "zer": 3716, + "zero": 14867, + "zero": 5848, + "zers": 9547, + "zes": 4073, + "zest": 37709, + "zet": 34098, + "zeta": 30954, + "zetta": 45993, + "zeus": 32800, + "zey": 46647, + "zh": 33389, + "zh": 41621, + "zhang": 21127, + "zhen": 37374, + "zhen": 33236, + "zhou": 17384, + "zhu": 42049, + "zi": 2651, + "zi": 5819, + "zia": 13764, + "zid": 30235, + "zidane": 34643, + "zie": 29316, + "zie": 8956, + "zieg": 40157, + "ziegler": 46812, + "ziel": 32151, + "zier": 15399, + "zies": 38001, + "ziest": 28159, + "zig": 15950, + "zig": 21345, + "ziggy": 39274, + "zik": 30125, + "zika": 28783, + "zil": 25039, + "zil": 33190, + "zilla": 17879, + "zim": 8112, + "zim": 22577, + "zimbab": 12373, + "zimbabwe": 45668, + "zimbabwe": 13583, + "zimmer": 27452, + "zimmer": 35211, + "zimmerman": 38231, + "zin": 14085, + "zin": 21278, + "zinc": 27458, + "zind": 26206, + "zindabad": 42208, + "zine": 16100, + "zing": 25062, + "zing": 3152, + "zinger": 42027, + "zio": 13906, + "zion": 31763, + "zion": 20963, + "zione": 36161, + "zionist": 33078, + "zip": 26479, + "zip": 16083, + "zipper": 33670, + "zir": 31892, + "zl": 39168, + "zlat": 32489, + "zlatan": 37877, + "zm": 43691, + "zman": 24248, + "zn": 18004, + "zo": 4397, + "zo": 5056, + "zodi": 22660, + "zodiac": 27753, + "zoe": 43114, + "zoe": 16662, + "zoey": 39871, + "zog": 40680, + "zol": 25939, + "zola": 46105, + "zom": 6623, + "zombi": 29452, + "zombie": 11819, + "zombies": 46702, + "zombies": 16517, + "zon": 15109, + "zon": 14618, + "zona": 42134, + "zone": 37197, + "zone": 4442, + "zones": 17247, + "zoning": 36790, + "zoo": 8182, + "zoo": 7147, + "zoom": 32671, + "zoom": 13909, + "zor": 17605, + "zou": 38072, + "zr": 39275, + "zs": 35248, + "zshq": 41442, + "zt": 42629, + "zu": 4091, + "zu": 14184, + "zucchini": 29873, + "zucker": 26890, + "zuckerberg": 30066, + "zul": 31146, + "zulu": 32821, + "zum": 35094, + "zuma": 23326, + "zumba": 32976, + "zun": 42440, + "zur": 17128, + "zurich": 21288, + "zw": 42188, + "zx": 31604, + "zy": 6615, + "zy": 2303, + "zyk": 39112, + "zyme": 36472, + "zyn": 45287, + "zz": 1544, + "zz": 4943, + "zza": 14642, + "zzi": 13974, + "zzie": 18635, + "zzle": 7873, + "zzled": 39075, + "zzo": 14036, + "zzy": 21275, + "zzy": 8353, + "zzz": 20055, + "zzzz": 35742, + "zzzz": 43103, + "{": 90, + "{": 346, + "{}": 39025, + "|": 91, + "|#": 31183, + "|": 347, + "|@": 41677, + "||": 7566, + "}": 92, + "}": 348, + "~": 93, + "~!": 31181, + "~\"": 48442, + "~": 349, + "~>": 43291, + "~@": 44247, + "~~": 11461, + "~~": 16671, + 
"~~~": 32472, + "~~~~": 28295, + "¡": 94, + "¡": 350, + "¡ï¸ı": 15113, + "¡ï¸ı": 4174, + "¡ľ": 43991, + "¢": 95, + "¢": 351, + "£": 96, + "£": 352, + "£ï¸ı": 18446, + "¤": 97, + "¤": 353, + "¥": 98, + "¥": 354, + "¦": 99, + "¦": 355, + "¦Ī": 47615, + "§": 100, + "§": 356, + "¨": 101, + "¨": 357, + "©": 102, + "©": 358, + "ª": 103, + "ª": 359, + "«": 104, + "«": 360, + "¬": 105, + "¬": 361, + "¬ë": 31736, + "®": 106, + "®": 362, + "¯": 107, + "¯": 363, + "°": 108, + "°:": 21787, + "°": 364, + "°ï¸ı": 34777, + "±": 109, + "±": 365, + "±ï¸ı": 41020, + "²": 110, + "²": 366, + "³": 111, + "³": 367, + "³ï¸ı": 22195, + "³ï¸ı": 24706, + "´": 112, + "´": 368, + "µ": 113, + "µ": 369, + "µï¸ı": 27605, + "¶": 114, + "¶": 370, + "·": 115, + "·": 371, + "¸": 116, + "¸": 372, + "¸ë": 19693, + "¹": 117, + "¹": 373, + "º": 118, + "º": 374, + "»": 119, + "»": 375, + "¼": 120, + "¼": 376, + "½": 121, + "½": 377, + "½ï¸ı": 31333, + "¾": 122, + "¾": 378, + "¿": 123, + "¿": 379, + "À": 124, + "À": 380, + "Á": 125, + "Á": 381, + "Â": 126, + "Â": 382, + "¡": 26868, + "¡": 10830, + "¡¡": 45505, + "¢": 41359, + "£": 31117, + "£": 1950, + "Â¥": 20199, + "¨": 19957, + "¨¨": 23089, + "¨¨¨¨": 41223, + "©": 31148, + "©": 5811, + "«": 14434, + "®": 30857, + "®": 8436, + "¯": 38682, + "¯": 43593, + "¯\\": 44096, + "¯\\_(": 45115, + "°": 21305, + "°": 6858, + "²": 41175, + "´": 30560, + "´": 12559, + "·": 14844, + "º": 28059, + "»": 31642, + "»": 7599, + "½": 33613, + "¿": 44559, + "¿": 17133, + "ÂŃ": 22618, + "Ã": 127, + "Ã": 383, + "á": 7261, + "á": 22229, + "án": 38340, + "án": 21385, + "â": 26170, + "ã": 19339, + "ão": 21141, + "ä": 10896, + "ä": 47276, + "än": 42787, + "Ã¥": 23176, + "æ": 42495, + "ç": 10067, + "ça": 22711, + "è": 12138, + "è": 37761, + "ère": 30272, + "ès": 41210, + "é": 3459, + "é": 4166, + "éal": 45251, + "ée": 13489, + "és": 20507, + "ê": 27515, + "ë": 29526, + "ë": 40520, + "î": 48704, + "ï": 35689, + "ñ": 6445, + "ña": 17753, + "ño": 16574, + "ños": 40104, + "ó": 8891, + "ó": 27733, + "ón": 13926, + "ô": 26815, + "ö": 7255, + "ö": 37423, + "ör": 31762, + "ø": 17483, + "ø": 45598, + "ú": 17963, + "ú": 36019, + "ü": 6522, + "ü": 47177, + "ür": 26132, + "ÃĹ": 16165, + "Ãł": 36149, + "Ãł": 21259, + "ÃŃ": 8366, + "ÃŃ": 23928, + "ÃŃa": 16609, + "ÃŃn": 33623, + "Ä": 128, + "Ä": 384, + "ı": 18562, + "ı": 41901, + "Äģ": 23134, + "Äĩ": 31719, + "Äį": 45414, + "ÄŁ": 26540, + "Å": 129, + "Å": 385, + "Å¡": 35621, + "ÅĤ": 40419, + "Åį": 41267, + "ÅŁ": 21254, + "ÅŁ": 40706, + "Æ": 130, + "Æ": 386, + "Ç": 131, + "Ç": 387, + "È": 132, + "È": 388, + "É": 133, + "É": 389, + "Ê": 134, + "Ê": 390, + "Ë": 135, + "Ë": 391, + "Ì": 136, + "Ì": 392, + "Ìĩ": 16384, + "Í": 137, + "Í": 393, + "Î": 138, + "Î": 394, + "Ï": 139, + "Ï": 395, + "Ïī": 38065, + "Ð": 140, + "Ð": 396, + "а": 16912, + "а": 27080, + "аÐ": 31090, + "в": 39813, + "е": 22176, + "и": 16701, + "иÐ": 29503, + "к": 27152, + "л": 47611, + "м": 38018, + "н": 22705, + "о": 13506, + "о": 29386, + "оÐ": 20978, + "од": 38416, + "оÑĤ": 28599, + "п": 26302, + "пÑĢи": 46321, + "пÑĢиÑĢода": 48150, + "Ñ": 141, + "Ñ": 397, + "ÑĢ": 16370, + "ÑĢи": 41092, + "ÑĢод": 47039, + "ÑĢода": 47929, + "Ñģ": 23669, + "ÑĤ": 17875, + "Ñĥ": 39729, + "ÑĦ": 27993, + "ÑĦоÑĤ": 35155, + "ÑĦоÑĤо": 38981, + "Ñĭ": 45001, + "Ò": 142, + "Ò": 398, + "Ó": 143, + "Ó": 399, + "Ô": 144, + "Ô": 400, + "Õ": 145, + "Õ": 401, + "Ö": 146, + "Ö": 402, + "×": 147, + "×": 403, + "Ø": 148, + "Ø": 404, + "ا": 6042, + "ا": 22625, + "اØ": 13189, + "ار": 40137, + "اÙ": 8453, + "اÙĦ": 12973, + "اÙħ": 47626, + 
"اÙĨ": 42773, + "اÙĨ": 33200, + "ب": 16378, + "ب": 35330, + "Ø©": 20915, + "ت": 18197, + "ت": 44333, + "ج": 26375, + "Ø®": 41495, + "د": 19872, + "د": 35566, + "ر": 10948, + "ر": 24933, + "رÙĬ": 43273, + "ز": 36169, + "س": 17856, + "Ø´": 28770, + "ص": 27271, + "Ø·": 32050, + "ع": 18843, + "غ": 48510, + "ØŃ": 25722, + "Ù": 149, + "Ù": 405, + "Ùģ": 24112, + "ÙĤ": 27585, + "Ùĥ": 33499, + "ÙĦ": 14251, + "ÙĦ": 37899, + "Ùħ": 12986, + "Ùħ": 29945, + "ÙĨ": 16655, + "ÙĨ": 25386, + "Ùĩ": 34274, + "Ùĩ": 31343, + "ÙĪ": 12203, + "ÙĪ": 38310, + "ÙĪر": 48242, + "ÙĬ": 12046, + "ÙĬ": 23853, + "Ú": 150, + "Ú": 406, + "Ú©": 26475, + "Û": 151, + "Û": 407, + "Ûģ": 40480, + "ÛĮ": 21452, + "ÛĮ": 32703, + "Ü": 152, + "Ü": 408, + "Ý": 153, + "Ý": 409, + "Þ": 154, + "Þ": 410, + "ß": 155, + "ß": 411, + "à": 156, + "à": 412, + "à¤": 3124, + "त": 27263, + "द": 29552, + "न": 26090, + "प": 44149, + "ब": 43599, + "म": 48254, + "म": 26774, + "य": 37299, + "र": 39136, + "र": 19052, + "ल": 30881, + "व": 39545, + "श": 43181, + "स": 28505, + "ह": 29446, + "ा": 37973, + "ा": 13343, + "ि": 26721, + "à¤Ĥ": 30833, + "à¤ķ": 22067, + "à¤Ĺ": 42598, + "à¤ľ": 39561, + "à¥": 7410, + "à¥Ģ": 45791, + "à¥Ģ": 25751, + "à¥ģ": 39653, + "à¥ĩ": 48612, + "à¥ĩ": 25130, + "à¥ĭ": 34452, + "à¥į": 19389, + "à¦": 11322, + "া": 41532, + "à§": 26339, + "à¨": 15741, + "à©": 32086, + "àª": 22990, + "à«": 48347, + "à¬": 32791, + "à®": 6022, + "த": 34691, + "ன": 43394, + "ப": 47388, + "à®®": 35463, + "à®°": 43270, + "ல": 47705, + "ா": 32831, + "ி": 27126, + "à®ķ": 36168, + "à®Ł": 45263, + "à¯": 11259, + "à¯ģ": 33115, + "à¯į": 16631, + "à°": 12100, + "à±": 23550, + "à±į": 46098, + "à²": 9992, + "ಿ": 47797, + "à³": 20745, + "à³į": 36148, + "à´": 15418, + "àµ": 27392, + "àµį": 45266, + "à¶": 29881, + "à·": 30766, + "à¸": 1777, + "ม": 26137, + "ม": 29570, + "ย": 27241, + "ย": 33091, + "ร": 32225, + "ร": 27331, + "ล": 34696, + "ล": 32746, + "ว": 26990, + "ว": 30245, + "ส": 37883, + "ส": 35737, + "ห": 33064, + "ะ": 43920, + "ะ": 49234, + "ั": 14978, + "า": 11529, + "า": 38476, + "าà¸": 12330, + "ิ": 17092, + "ี": 22421, + "ี": 20278, + "ีà¹Ī": 31511, + "ื": 47991, + "ุ": 30524, + "ู": 35273, + "à¸ģ": 30767, + "à¸ģà¸": 31474, + "à¸Ħ": 31757, + "à¸Ħà¸": 39628, + "à¸ĩ": 24603, + "à¸ĩ": 33382, + "à¸Ī": 47608, + "à¸Ĭ": 46324, + "à¸Ķ": 31107, + "à¸Ķ": 38825, + "à¸ķ": 40273, + "à¸ķ": 41108, + "à¸Ĺ": 36171, + "à¸Ļ": 17474, + "à¸Ļ": 17639, + "à¸Ļà¸": 23121, + "à¸ļ": 33859, + "à¸ļ": 39616, + "à¸ŀ": 48171, + "à¸Ń": 13398, + "à¸Ń": 32818, + "à¸Ńà¸": 14649, + "à¸Ńà¸ĩ": 46622, + "à¹": 4484, + "à¹Ģ": 13729, + "à¹Ģà¸": 14076, + "à¹ģà¸": 23916, + "à¹Ĥ": 33118, + "à¹ĥ": 40962, + "à¹Ħà¸": 31718, + "à¹ĩ": 38699, + "à¹Ī": 11722, + "à¹ī": 13123, + "à¹Į": 28353, + "à¼": 46186, + "à½": 39219, + "á": 157, + "á": 413, + "á´": 19036, + "áµ": 17330, + "áĢ": 45932, + "áĥ": 24829, + "áĥ¦": 32193, + "â": 158, + "â": 414, + "â¤": 25087, + "⤵ï¸ı": 36026, + "â¬": 7930, + "â¬ħï¸ı": 42111, + "â¬Ĩ": 27718, + "â¬Ĩï¸ı": 32798, + "â¬ĩ": 10917, + "â¬ĩ": 39370, + "â¬ĩï¸ı": 25621, + "â¬ĩï¸ı": 13984, + "â¬ĩï¸ıâ¬ĩï¸ı": 40159, + "âĢ": 728, + "âĢ¢": 9485, + "âĢ¢": 2701, + "âĢ¢âĢ¢": 15006, + "âĢ¢âĢ¢": 47575, + "âĢ¢âĢ¢âĢ¢âĢ¢": 27502, + "âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢": 48630, + "âĢ¦": 7095, + "âĢ¦\"": 20215, + "âĢ¦..": 47779, + "âĢ¦.": 18615, + "âĢ¦/": 29842, + "âĢ¦": 959, + "âĢ¦âĢ¦": 40066, + "âĢ²": 32633, + "âĢ³": 25061, + "âĢ¼": 6578, + "âĢ¼ï¸ı": 15622, + "âĢ¼ï¸ı": 8310, + "âĢ¼ï¸ıâĢ¼ï¸ı": 33218, + "âĢĭ": 17086, + "âĢĭ": 9844, + "âĢį": 4244, + "âĢįâĻ": 5177, + "âĢįâĻĢï¸ı": 18897, + "âĢįâĻĢï¸ı": 9605, + "âĢįâĻĤ": 
8832, + "âĢįâĻĤï¸ı": 21779, + "âĢįâĻĤï¸ı": 10613, + "âĢİ": 31001, + "âĢIJ": 34512, + "âĢĵ": 21070, + "âĢĵ": 1224, + "âĢĶ": 6718, + "âĢĶ": 2005, + "âĢĶ>": 26341, + "âĢĶ@": 28470, + "âĢĶâĢĶ": 10037, + "âĢĶâĢĶ": 44800, + "âĢĶâĢĶâĢĶâĢĶ": 17797, + "âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ": 34432, + "âĢķ": 14236, + "âģ": 1667, + "âģ£": 31089, + "âģ£": 16845, + "âģ¦": 2773, + "âģ¦": 34855, + "âģ¦@": 2859, + "âģ¦âģ¦@": 27783, + "âģ©": 20097, + "âģ©,": 48749, + "âģ©.": 35777, + "âģ©": 2918, + "âģīï¸ı": 46534, + "âģł": 23881, + "âģł": 13503, + "âģłâģł": 33488, + "âĤ": 5227, + "âĤ¬": 34919, + "âĤ¬": 6309, + "âĤ¹": 21777, + "âĥ": 2805, + "âĥ£": 11250, + "âĥ£": 3076, + "âĥ£@": 48291, + "âĦ": 8604, + "âĦ¢": 29438, + "âĦ¢": 11675, + "âĦ¹": 45462, + "âĨ": 6059, + "âĨĴ": 7481, + "âĨĵ": 41603, + "âĩ": 27228, + "âĪ": 17788, + "âī": 22684, + "âīĪ": 45451, + "âĮ": 17848, + "âĮļ": 31301, + "âĮļï¸ı": 35931, + "âı": 7960, + "âı©": 40847, + "âı°": 12714, + "âı±": 33149, + "âı³": 47617, + "âĵ": 27400, + "âĶ": 13389, + "âĶĢ": 45139, + "âĶģ": 42022, + "âķ": 17027, + "âķIJ": 48039, + "âĸ": 4168, + "âĸª": 21203, + "âĸª": 36628, + "âĸªï¸ı": 24974, + "âĸ«": 39478, + "âĸ¬": 33798, + "âĸ¬âĸ¬": 36975, + "âĸ¶": 12509, + "âĸ¶": 21126, + "âĸ¶ï¸ı": 14442, + "âĸº": 46061, + "âĸº": 12086, + "âĸ½": 45634, + "âĸł": 36791, + "âĹ": 9323, + "âĹĨ": 48961, + "âĹı": 26999, + "âĺ": 1741, + "âĺ®": 45851, + "âĺ¹": 28811, + "âĺ¹ï¸ı": 39605, + "âĺº": 5010, + "âĺº": 8703, + "âĺºâĺº": 46051, + "âĺºï¸ı": 11506, + "âĺºï¸ı": 7779, + "âĺºï¸ıâĺºï¸ı": 41315, + "âĺ¼": 38877, + "âĺĢ": 32146, + "âĺĢ": 22242, + "âĺĢï¸ı": 12817, + "âĺĢï¸ı": 8219, + "âĺĢï¸ıâĺĢï¸ı": 44550, + "âĺģ": 25195, + "âĺģï¸ı": 35197, + "âĺĥ": 38972, + "âĺħ": 9339, + "âĺħ": 10643, + "âĺħâĺħ": 12681, + "âĺħâĺħ": 36644, + "âĺħâĺħâĺħâĺħ": 34431, + "âĺħâĺħâĺħâĺħ": 44034, + "âĺħâĺħâĺħâĺħâĺħ": 45984, + "âĺĨ": 23941, + "âĺĨ": 13439, + "âĺİ": 24045, + "âĺİ": 45493, + "âĺİï¸ı": 27219, + "âĺij": 20983, + "âĺij": 42300, + "âĺijï¸ı": 22291, + "âĺĶï¸ı": 31238, + "âĺķ": 11454, + "âĺķ": 26561, + "âĺķï¸ı": 25839, + "âĺķï¸ı": 15499, + "âĺĺ": 23483, + "âĺĺï¸ı": 31454, + "âĺĿ": 21982, + "âĺĿï¸ı": 38891, + "âĺŀ": 31255, + "âĺłï¸ı": 34672, + "âĻ": 1548, + "âĻ¡": 11091, + "âĻ¡": 6251, + "âĻ¡âĻ¡": 22360, + "âĻ¡âĻ¡": 34267, + "âĻ¡âĻ¡âĻ¡": 36611, + "âĻ¤": 47435, + "âĻ¥": 4622, + "âĻ¥": 3405, + "âĻ¥âĻ¥": 12975, + "âĻ¥âĻ¥": 19604, + "âĻ¥âĻ¥âĻ¥": 23255, + "âĻ¥âĻ¥âĻ¥âĻ¥": 49020, + "âĻ¥ï¸ı": 17774, + "âĻ¥ï¸ı": 10561, + "âĻ¥ï¸ıâĻ¥ï¸ı": 40309, + "âĻ¦": 32376, + "âĻ¦": 47547, + "âĻ©": 30339, + "âĻ©âĻ«": 31636, + "âĻª": 27364, + "âĻª": 12382, + "âĻ«": 39217, + "âĻ«": 10814, + "âĻ¬": 24753, + "âĻ»": 39611, + "âĻ»ï¸ı": 46075, + "âļ": 2234, + "âļ¡": 40098, + "âļ¡": 20712, + "âļ¡ï¸ı": 19500, + "âļ¡ï¸ı": 11605, + "âļ¡ï¸ıâļ¡ï¸ı": 45922, + "âļª": 11922, + "âļª": 36373, + "âļªï¸ı": 22251, + "âļªï¸ı": 17885, + "âļ«": 15374, + "âļ«ï¸ı": 26529, + "âļ«ï¸ı": 24649, + "âļ½": 4867, + "âļ½": 13173, + "âļ½âļ½": 43259, + "âļ½ï¸ı": 11342, + "âļ½ï¸ı": 6768, + "âļ½ï¸ıâļ½ï¸ı": 30358, + "âļ½ï¸ıâļ½ï¸ı": 44148, + "âļ¾": 11314, + "âļ¾": 34717, + "âļ¾ï¸ı": 24727, + "âļ¾ï¸ı": 14858, + "âļĵ": 23522, + "âļĵï¸ı": 35299, + "âļĶï¸ı": 29361, + "âļľ": 47491, + "âļł": 39203, + "âļłï¸ı": 40966, + "âļłï¸ı": 15596, + "âĽ": 7956, + "âĽ³ï¸ı": 29204, + "âĽĦ": 30668, + "âĽĦï¸ı": 45465, + "âľ": 1508, + "⾨": 7181, + "⾨": 3531, + "⾨⾨": 35174, + "⾨⾨": 21985, + "⾨⾨⾨": 39424, + "âľĤ": 38602, + "âľħ": 29544, + "âľħ": 5564, + "âľĪ": 10682, + "âľĪ": 30712, + "âľĪï¸ı": 26176, + "âľĪï¸ı": 13413, + "âľĬ": 12392, + "âľĬ": 17819, + "âľĬðŁı½": 48547, + "âľĬðŁı¾": 41185, + "âľĭ": 39383, + 
"âľĭ": 30239, + "âľĮ": 6419, + "âľĮ": 12656, + "âľĮï¸ı": 21906, + "âľĮï¸ı": 12239, + "âľĮðŁı»": 30538, + "âľĮðŁı¼": 30588, + "âľį": 20872, + "âľįï¸ı": 30888, + "âľı": 32574, + "âľıï¸ı": 40724, + "âľĵ": 36700, + "âľĶ": 47200, + "âľĶ": 13749, + "âľĶï¸ı": 40544, + "âľĶï¸ı": 9191, + "âľĸï¸ı": 44133, + "âľĿ": 42220, + "âĿ": 1045, + "âĿ£": 37007, + "âĿ£": 25623, + "âĿ£ï¸ı": 25240, + "âĿ¤": 1266, + "âĿ¤": 2720, + "âĿ¤âĿ¤": 9033, + "âĿ¤âĿ¤": 14058, + "âĿ¤âĿ¤âĿ¤": 16708, + "âĿ¤âĿ¤âĿ¤âĿ¤": 37918, + "âĿ¤âĿ¤âĿ¤âĿ¤": 43970, + "âĿ¤ï¸ı": 2626, + "âĿ¤ï¸ı#": 30281, + "âĿ¤ï¸ı.": 45326, + "âĿ¤ï¸ı": 1752, + "âĿ¤ï¸ı@": 31187, + "âĿ¤ï¸ıâĿ¤ï¸ı": 6713, + "âĿ¤ï¸ıâĿ¤ï¸ı": 10363, + "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 12282, + "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 39167, + "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 29880, + "âĿ¤ï¸ıðŁĴĻ": 37380, + "âĿ¤ï¸ıðŁĺį": 37272, + "âĿ¤ï¸ıðŁĺĺ": 41800, + "âĿ¤ðŁĺį": 49120, + "âĿ¥": 36914, + "âĿĦ": 8501, + "âĿĦ": 30494, + "âĿĦï¸ı": 16834, + "âĿĦï¸ı": 12402, + "âĿĦï¸ıâĿĦï¸ı": 41626, + "âĿĮ": 44485, + "âĿĮ": 17975, + "âĿĵ": 29791, + "âĿĹ": 12868, + "âĿĹ": 29079, + "âĿĹï¸ı": 28642, + "âĿĹï¸ı": 17391, + "âĿĿ": 46951, + "âŀ": 3257, + "âŀ¡": 12854, + "âŀ¡ï¸ı": 31860, + "âŀ¡ï¸ı": 4956, + "âŀ¤": 18651, + "âŀķ": 46526, + "âŀĸ": 21327, + "âŀĸ": 34902, + "âŀĸâŀĸ": 23316, + "âŀĸâŀĸâŀĸâŀĸ": 40401, + "âŀľ": 23775, + "âł": 5689, + "âłĢ": 9691, + "âłĢ": 8621, + "âłĢâłĢ": 11466, + "âłĢâłĢ": 39092, + "âłĢâłĢâłĢâłĢ": 20976, + "âłĢâłĢâłĢâłĢâłĢâłĢâłĢâłĢ": 46063, + "âŃ": 5527, + "âŃIJ": 6410, + "âŃIJ": 19012, + "âŃIJâŃIJ": 32663, + "âŃIJï¸ı": 12427, + "âŃIJï¸ı": 10251, + "âŃIJï¸ıâŃIJï¸ı": 18640, + "âŃIJï¸ıâŃIJï¸ıâŃIJï¸ı": 40746, + "ã": 159, + "ã": 415, + "ãĢ": 4092, + "ãĢģ": 45262, + "ãĢĤ": 38060, + "ãĢĤ": 38000, + "ãĢĬ": 39920, + "ãĢĭ": 32898, + "ãĢĮ": 18116, + "ãĢį": 19149, + "ãĢİ": 26947, + "ãĢı": 30293, + "ãĢIJ": 12534, + "ãĢij": 12990, + "ãĢľ": 39581, + "ãģ": 4813, + "ãģ¦": 48029, + "ãģ¨": 34671, + "ãģ¨ç¹ĭãģ": 47310, + "ãģ¨ç¹ĭãģĮãĤĬãģŁãģĦ": 48290, + "ãģª": 29104, + "ãģ®": 21575, + "ãģ·": 44130, + "ãģĦ": 33523, + "ãģĦ": 38850, + "ãģĨ": 44235, + "ãģį": 42184, + "ãĤ": 3909, + "ãĤ¢": 26560, + "ãĤ¤": 19319, + "ãĤ¤ãĥ": 36294, + "ãĤ«": 37367, + "ãĤ¯": 31574, + "ãĤ·": 37665, + "ãĤ¸": 32234, + "ãĤ¸ãĥ": 43491, + "ãĤ¹": 22694, + "ãĤ¹": 39220, + "ãĤ¹ãĥ": 32421, + "ãĤ¿": 34941, + "ãĤĬãģ": 40500, + "ãĤĮ": 45211, + "ãĤŃ": 47121, + "ãĥ": 2429, + "ãĥ©": 23007, + "ãĥª": 32115, + "ãĥ«": 33257, + "ãĥ¬": 32965, + "ãĥ³": 17671, + "ãĥ³": 26875, + "ãĥ³ãĤ": 45105, + "ãĥ³ãĥ": 25914, + "ãĥ»": 8415, + "ãĥ»": 11158, + "ãĥ»ãĥ»": 13949, + "ãĥ»ãĥ»ãĥ»": 14234, + "ãĥ¼": 13457, + "ãĥ¼": 30391, + "ãĥ¼ãĥ": 18584, + "ãĥĥ": 28902, + "ãĥĦ": 32173, + "ãĥĪ": 42384, + "ãĥİ": 39967, + "ãĥķãĤ": 33371, + "ãĥŀ": 48924, + "ãĥŃ": 35827, + "ãħ": 5947, + "ãħ¤": 21096, + "ãħ¤ãħ¤": 22583, + "ãħ¤ãħ¤ãħ¤ãħ¤": 39329, + "ãħĭ": 13052, + "ãħĭ": 25108, + "ãħĭãħĭ": 16604, + "ãħĭãħĭ": 42581, + "ãħĭãħĭãħĭ": 46407, + "ãħĭãħĭãħĭãħĭ": 39362, + "ãħł": 16089, + "ãħł": 25781, + "ãħłãħł": 22021, + "ãħłãħł": 34398, + "ãħłãħłãħłãħł": 47028, + "ä": 160, + "ä": 416, + "ä¸": 19759, + "ä¹": 41854, + "äº": 21078, + "人": 36839, + "ä»": 37743, + "ä½": 47466, + "å": 161, + "å": 417, + "å¤": 23170, + "å¥": 29290, + "å®": 27047, + "å°": 34720, + "å±": 46096, + "å¸": 42021, + "å¹": 38780, + "åħ": 34314, + "åĨ": 27972, + "åĨĻ": 44653, + "åĪ": 42748, + "åĭ": 47505, + "åı": 34517, + "åIJ": 41673, + "åĽ": 39027, + "åľ": 37746, + "åŃ": 35751, + "æ": 162, + "æ": 418, + "æĸ": 29032, + "æĹ": 22265, + "æĹ¥": 39121, + "æĹ¥": 37156, + "æĺ": 42891, + "æĻ": 48132, + "æľ": 19277, + "æľ¬": 44353, + "æĿ": 27667, + "æĿ±": 48338, + "ç": 
163, + "ç": 419, + "ç¥": 26369, + "ç¥Ń": 42557, + "çµ": 37810, + "ç¹": 43431, + "ç¹ĭãģ": 45930, + "çĶ": 20211, + "çĶŁ": 33375, + "çľ": 33440, + "羣": 41570, + "è": 164, + "è": 420, + "èª": 34002, + "èªķ": 41293, + "é": 165, + "é": 421, + "éģ": 44854, + "éĩ": 38283, + "ê": 166, + "ê": 422, + "ê°": 21122, + "ê°ĵ": 41076, + "ê°ĵìĦ¸ë¸IJ": 41689, + "ê°ķ": 45758, + "ê²": 35555, + "ê³": 36216, + "êµ": 31871, + "ê·": 42680, + "ê¸": 32495, + "ê¹": 24531, + "ê¹Ģ": 25203, + "ë": 167, + "ë": 423, + "ë¦": 24621, + "리": 47649, + "ë§": 28024, + "ë§Ī": 40027, + "ëª": 36311, + "ë¯": 19528, + "민": 34442, + "민": 44632, + "ë°": 15810, + "ë°©": 23273, + "ë°©íĥ": 25081, + "ë°©íĥĦ": 25641, + "ë°©íĥĦìĨĮëħĦëĭ": 26068, + "ë°©íĥĦìĨĮëħĦëĭ¨": 27129, + "ë°ķ": 40988, + "ë²": 48267, + "ë³": 44693, + "ë¹": 24193, + "ëĤ": 27252, + "ëĤĺ": 48484, + "ëĭ": 13094, + "ëĭ¤": 46680, + "ëĭĪ": 33708, + "ëį": 45543, + "ëı": 31972, + "ëĵ": 30850, + "ëĿ": 44317, + "ì": 168, + "ì": 424, + "ì£": 39856, + "주": 45161, + "ì¤": 31153, + "ì§": 16279, + "ì§Ģ": 28836, + "ì§Ħ": 38890, + "ì°": 40742, + "ì¶": 42476, + "ì¶ķ": 46403, + "ì¶ķíķĺ": 47866, + "ì¹": 45088, + "ìĤ": 31061, + "ìĥ": 30587, + "ìĥĿ": 47858, + "ìĦ": 15074, + "ìĦ¸ë": 29254, + "ìĦ¸ë¸": 29658, + "ìĦ¸ë¸IJ": 41415, + "ìĨ": 15115, + "ìĨĮë": 20515, + "ìĨĮëħ": 21391, + "ìĨĮëħĦëĭ": 25887, + "ìĪ": 32757, + "ìĬ": 12125, + "ìĬ¤": 20305, + "ìĬ¤": 23829, + "ìĭ": 23924, + "ìķ": 16071, + "ìķĦ": 23233, + "ìĸ": 31625, + "ìĹ": 13252, + "ìĹIJ": 37622, + "ìĹij": 31036, + "ìĹijìĨ": 42763, + "ìĹijìĨĮ": 45606, + "ìĺ": 21144, + "ìĻ": 39405, + "ìļ": 18541, + "ìļ°": 38415, + "ìļ°": 49344, + "ìĽ": 22543, + "ìĽIJ": 36495, + "ìľ": 20909, + "ìľł": 42890, + "ìĿ": 8276, + "ìĿ´": 12286, + "ìĿ´": 34746, + "ìĿ´ì": 37590, + "ìĿ¼": 43406, + "ìŀ": 20849, + "ìł": 20580, + "ìłķ": 34725, + "í": 169, + "í": 425, + "íģ": 35641, + "íģ¬": 45832, + "íĤ": 43565, + "íĥ": 15012, + "íĥĢ": 41126, + "íĥľ": 37663, + "íĬ": 23215, + "íĬ¸": 48974, + "íĬ¸": 39820, + "íĭ": 34350, + "íĶ": 29450, + "íķ": 15197, + "íķ´": 35286, + "íķĺ": 33992, + "íĺ": 15962, + "íĺ¸": 39657, + "íĺĦ": 34645, + "íĻ": 31882, + "î": 170, + "î": 426, + "îĢ": 36288, + "îĦ": 35368, + "îĮ": 41006, + "îIJ": 16929, + "îIJĴ": 40100, + "ï": 171, + "ï": 427, + "ï¸": 842, + "ï¸İ": 24029, + "ï¸ı": 1392, + "ï¸ı#": 46997, + "ï¸ı:": 32604, + "ï¸ı": 1001, + "ï¸ı@": 34600, + "ï¸ıâĥ£": 17394, + "ï¸ıâĥ£-": 40376, + "ï¸ıâĥ£": 4603, + "ï¿": 27850, + "�": 47356, + "�": 39802, + "ð": 172, + "ð": 428, + "ðĿ": 6874, + "ðĿIJ": 15889, + "ðĿij": 43794, + "ðĿĴ": 43387, + "ðĿĵ": 47110, + "ðĿĹ": 18865, + "ðĿĺ": 26109, + "ðĿĻ": 29415, + "ðŁ": 558, + "ðŁ¤": 1793, + "ðŁ¤£": 9665, + "ðŁ¤£": 9909, + "ðŁ¤£ðŁ¤£": 16430, + "ðŁ¤£ðŁ¤£": 31009, + "ðŁ¤£ðŁ¤£ðŁ¤£": 32262, + "ðŁ¤¤": 39550, + "ðŁ¤¤": 26759, + "ðŁ¤¦": 17186, + "ðŁ¤§": 40983, + "ðŁ¤©": 27351, + "ðŁ¤©": 16074, + "ðŁ¤ª": 44230, + "ðŁ¤ª": 24920, + "ðŁ¤«": 47671, + "ðŁ¤¯": 37595, + "ðŁ¤·": 13185, + "ðŁ¤·ðŁı»âĢįâĻĢï¸ı": 46770, + "ðŁ¤ij": 34801, + "ðŁ¤ĵ": 36580, + "ðŁ¤ĵ": 18928, + "ðŁ¤Ķ": 12706, + "ðŁ¤Ķ": 6497, + "ðŁ¤ĶðŁ¤Ķ": 28490, + "ðŁ¤ĶðŁ¤ĶðŁ¤Ķ": 43361, + "ðŁ¤ĸ": 46146, + "ðŁ¤Ĺ": 16646, + "ðŁ¤Ĺ": 10465, + "ðŁ¤ĹðŁ¤Ĺ": 44321, + "ðŁ¤ĺ": 10623, + "ðŁ¤ĺ": 17288, + "ðŁ¤ĺðŁı»": 46449, + "ðŁ¤ĺðŁı»": 30891, + "ðŁ¤ĺðŁı¼": 31458, + "ðŁ¤ĺðŁı½": 49362, + "ðŁ¤Ļ": 23800, + "ðŁ¤Ļ": 39101, + "ðŁ¤Ŀ": 35242, + "ðŁ¤ŀ": 29463, + "ðŁ¤ŀ": 38597, + "ðŁ¤Ł": 48509, + "ðŁ¤ł": 36737, + "ðŁ¤Ń": 47289, + "ðŁ¥": 4156, + "ðŁ¥°": 29246, + "ðŁ¥°": 17597, + "ðŁ¥³": 45823, + "ðŁ¥³": 28055, + "ðŁ¥º": 43380, + "ðŁ¥º": 36858, + "ðŁ¥Ĥ": 43805, + "ðŁ¥Ĥ": 25212, + "ðŁ¥ĥ": 47790, + 
"ðŁ¥ĩ": 34372, + "ðŁ¥ĩ": 20069, + "ðŁ¥Ī": 35858, + "ðŁ¥ī": 36782, + "ðŁ¥Ĭ": 29275, + "ðŁ¦": 6040, + "ðŁ¦ģ": 36367, + "ðŁ¦ģ": 26056, + "ðŁ¦ĥ": 40184, + "ðŁ¦Ħ": 37659, + "ðŁ¦ħ": 28800, + "ðŁ¦Ī": 48984, + "ðŁ¦ĭ": 49325, + "ðŁ¦ĭ": 28985, + "ðŁ§": 8792, + "ðŁ§¡": 30996, + "ðŁ§¡": 24578, + "ðŁ§IJ": 33549, + "ðŁħ": 22010, + "ðŁĨ": 9536, + "ðŁĨķ": 34956, + "ðŁĨĺ": 39868, + "ðŁĨļ": 16325, + "ðŁĩ": 1173, + "ðŁĩ¦": 12469, + "ðŁĩ¦": 28565, + "ðŁĩ¦ðŁĩ": 33196, + "ðŁĩ¦ðŁĩ·": 41629, + "ðŁĩ¦ðŁĩº": 25192, + "ðŁĩ§": 14660, + "ðŁĩ§ðŁĩ": 37342, + "ðŁĩ§ðŁĩª": 38794, + "ðŁĩ§ðŁĩ·": 28182, + "ðŁĩ¨": 8889, + "ðŁĩ¨ðŁĩ": 8989, + "ðŁĩ¨ðŁĩ¦": 34324, + "ðŁĩ¨ðŁĩ¦": 16364, + "ðŁĩ¨ðŁĩ³": 36819, + "ðŁĩ¨ðŁĩŃ": 41119, + "ðŁĩ©": 15222, + "ðŁĩ©ðŁĩ": 36350, + "ðŁĩ©ðŁĩª": 21531, + "ðŁĩª": 11428, + "ðŁĩª": 12331, + "ðŁĩªðŁĩ": 13917, + "ðŁĩªðŁĩ¸": 22177, + "ðŁĩªðŁĩº": 34655, + "ðŁĩ«": 12977, + "ðŁĩ«ðŁĩ·": 39109, + "ðŁĩ«ðŁĩ·": 16223, + "ðŁĩ¬": 8129, + "ðŁĩ¬ðŁĩ": 8354, + "ðŁĩ¬ðŁĩ§": 23762, + "ðŁĩ¬ðŁĩ§": 11559, + "ðŁĩ®": 8268, + "ðŁĩ®ðŁĩ": 8347, + "ðŁĩ®ðŁĩª": 34148, + "ðŁĩ®ðŁĩ³": 47299, + "ðŁĩ®ðŁĩ³": 23602, + "ðŁĩ®ðŁĩ¹": 42034, + "ðŁĩ®ðŁĩ¹": 17070, + "ðŁĩ¯": 20090, + "ðŁĩ¯ðŁĩ": 22924, + "ðŁĩ¯ðŁĩµ": 26527, + "ðŁĩ°": 28232, + "ðŁĩ±": 29533, + "ðŁĩ±ðŁĩ": 40941, + "ðŁĩ²": 16411, + "ðŁĩ²ðŁĩ": 17562, + "ðŁĩ²ðŁĩ½": 32073, + "ðŁĩ³": 16645, + "ðŁĩ³ðŁĩ": 17747, + "ðŁĩ³ðŁĩ±": 36747, + "ðŁĩµ": 12127, + "ðŁĩµðŁĩ": 13608, + "ðŁĩµðŁĩ°": 37764, + "ðŁĩµðŁĩ¹": 42621, + "ðŁĩµðŁĩŃ": 42777, + "ðŁĩ·": 16026, + "ðŁĩ·": 9869, + "ðŁĩ·ðŁĩº": 37902, + "ðŁĩ¸": 19447, + "ðŁĩ¸ðŁĩ": 33325, + "ðŁĩ¸ðŁĩª": 39260, + "ðŁĩ¹": 21810, + "ðŁĩ¹ðŁĩ": 36250, + "ðŁĩº": 4054, + "ðŁĩº": 17467, + "ðŁĩºðŁĩ": 4131, + "ðŁĩºðŁĩ¸": 8907, + "ðŁĩºðŁĩ¸": 5688, + "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 18739, + "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 41411, + "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 43357, + "ðŁĩ¿": 25520, + "ðŁĩ¿ðŁĩ¦": 36982, + "ðŁĩŃ": 30370, + "ðŁĮ": 1576, + "ðŁĮ±": 35318, + "ðŁĮ±": 20665, + "ðŁĮ²": 34071, + "ðŁĮ²": 28154, + "ðŁĮ³": 44265, + "ðŁĮ³": 28543, + "ðŁĮ´": 20643, + "ðŁĮ´": 15968, + "ðŁĮµ": 40871, + "ðŁĮ·": 32328, + "ðŁĮ·": 24259, + "ðŁĮ¸": 16314, + "ðŁĮ¸": 10980, + "ðŁĮ¸ðŁĮ¸": 46210, + "ðŁĮ¹": 14990, + "ðŁĮ¹": 10662, + "ðŁĮ¹ðŁĮ¹": 37933, + "ðŁĮº": 27608, + "ðŁĮº": 19829, + "ðŁĮ»": 27196, + "ðŁĮ»": 19772, + "ðŁĮ¼": 36484, + "ðŁĮ¼": 26312, + "ðŁĮ¾": 39796, + "ðŁĮ¿": 27736, + "ðŁĮ¿": 18588, + "ðŁĮĢ": 34348, + "ðŁĮħ": 27547, + "ðŁĮĪ": 23038, + "ðŁĮĪ": 13042, + "ðŁĮĬ": 20465, + "ðŁĮĬ": 14302, + "ðŁĮĮ": 43393, + "ðŁĮį": 34931, + "ðŁĮį": 18641, + "ðŁĮİ": 31125, + "ðŁĮİ": 16969, + "ðŁĮı": 31527, + "ðŁĮIJ": 33071, + "ðŁĮĻ": 42330, + "ðŁĮĻ": 23283, + "ðŁĮļ": 49004, + "ðŁĮļ": 27877, + "ðŁĮŀ": 21152, + "ðŁĮŀ": 12980, + "ðŁĮŁ": 13196, + "ðŁĮŁ": 8542, + "ðŁĮŁðŁĮŁ": 26014, + "ðŁį": 2011, + "ðŁį¦": 47375, + "ðŁį¦": 32032, + "ðŁį©": 38379, + "ðŁįª": 38958, + "ðŁį«": 47994, + "ðŁį«": 33401, + "ðŁį°": 43732, + "ðŁį°": 30051, + "ðŁį³": 37441, + "ðŁį´": 41531, + "ðŁį´": 25338, + "ðŁį·": 24445, + "ðŁį·": 18072, + "ðŁį¸": 43058, + "ðŁį¸": 31217, + "ðŁį¹": 35598, + "ðŁįº": 31081, + "ðŁįº": 21590, + "ðŁį»": 22793, + "ðŁį»": 13167, + "ðŁį¾": 27294, + "ðŁį¾": 21656, + "ðŁįĢ": 22865, + "ðŁįĢ": 15764, + "ðŁįģ": 29837, + "ðŁįģ": 23075, + "ðŁįĤ": 35015, + "ðŁįĤ": 25721, + "ðŁįĥ": 27157, + "ðŁįĥ": 20147, + "ðŁįĩ": 48697, + "ðŁįĬ": 35001, + "ðŁįĬ": 28036, + "ðŁįĭ": 39543, + "ðŁįĮ": 44987, + "ðŁįį": 48946, + "ðŁįİ": 32069, + "ðŁįij": 32889, + "ðŁįĴ": 33160, + "ðŁįĵ": 44739, + "ðŁįĵ": 33456, + "ðŁįĶ": 46415, + "ðŁįĶ": 36031, + "ðŁįķ": 31469, + "ðŁįķ": 23904, + "ðŁįŃ": 42100, + "ðŁİ": 1165, + "ðŁİ£": 43158, + "ðŁİ¤": 23490, + "ðŁİ¤": 15690, + 
"ðŁİ¥": 22186, + "ðŁİ¥:": 43640, + "ðŁİ¥": 13233, + "ðŁİ§": 31254, + "ðŁİ§": 14266, + "ðŁİ¨": 31953, + "ðŁİ¨": 13461, + "ðŁİ©": 37701, + "ðŁİ«": 30331, + "ðŁİ¬": 36020, + "ðŁİ¬": 18150, + "ðŁİ®": 29312, + "ðŁİ¯": 23114, + "ðŁİµ": 27435, + "ðŁİµ": 14946, + "ðŁİ¶": 11755, + "ðŁİ¶": 6011, + "ðŁİ¶ðŁİ¶": 36283, + "ðŁİ¸": 29135, + "ðŁİ¸": 22122, + "ðŁİ¹": 43493, + "ðŁİ¼": 34949, + "ðŁİ¼": 23757, + "ðŁİ¾": 41982, + "ðŁİ¾": 24222, + "ðŁİĢ": 34347, + "ðŁİĢ": 20151, + "ðŁİģ": 18368, + "ðŁİģ": 13462, + "ðŁİĤ": 13026, + "ðŁİĤ": 10392, + "ðŁİĤðŁİĤ": 39338, + "ðŁİĥ": 22622, + "ðŁİĥ": 16780, + "ðŁİĦ": 12942, + "ðŁİĦ": 11267, + "ðŁİħ": 17685, + "ðŁİħ": 24276, + "ðŁİĨ": 39222, + "ðŁİĪ": 16142, + "ðŁİĪ": 14448, + "ðŁİĪðŁİī": 48049, + "ðŁİī": 4310, + "ðŁİī:": 17310, + "ðŁİī": 3986, + "ðŁİīðŁİ": 11473, + "ðŁİīðŁİĪ": 40499, + "ðŁİīðŁİĪ": 34008, + "ðŁİīðŁİī": 25159, + "ðŁİīðŁİī": 13450, + "ðŁİīðŁİīðŁİī": 20828, + "ðŁİīðŁİĬ": 31662, + "ðŁİīðŁİĬ": 30781, + "ðŁİĬ": 22763, + "ðŁİĬ": 22425, + "ðŁİĬðŁİī": 48801, + "ðŁİĵ": 28916, + "ðŁİĵ": 18744, + "ðŁİĻ": 29001, + "ðŁİĻ": 29753, + "ðŁİĻï¸ı": 44205, + "ðŁİŁ": 19248, + "ðŁİŁ": 21107, + "ðŁİŁï¸ı": 30243, + "ðŁİŃ": 28856, + "ðŁı": 1109, + "ðŁı¡": 27318, + "ðŁı³ï¸ı": 26844, + "ðŁı³ï¸ıâĢį": 27093, + "ðŁı³ï¸ıâĢįðŁĮĪ": 32610, + "ðŁı´": 39690, + "ðŁı´": 19704, + "ðŁı»": 5042, + "ðŁı»": 3702, + "ðŁı»âĢį": 46250, + "ðŁı»âĢįâĻĢï¸ı": 48391, + "ðŁı»âĢįâĻĢï¸ı": 23595, + "ðŁı»âĢįâĻĤï¸ı": 30984, + "ðŁı¼": 6193, + "ðŁı¼": 4027, + "ðŁı¼âĢįâĻĢï¸ı": 28955, + "ðŁı½": 8514, + "ðŁı½": 6114, + "ðŁı½âĢįâĻĢï¸ı": 37036, + "ðŁı½âĢįâĻĤï¸ı": 43157, + "ðŁı¾": 10230, + "ðŁı¾": 7778, + "ðŁı¾âĢįâĻĤï¸ı": 47189, + "ðŁı¿": 29854, + "ðŁı¿": 21094, + "ðŁıĢ": 13708, + "ðŁıĢ": 8813, + "ðŁıĢðŁıĢ": 43169, + "ðŁıģ": 29423, + "ðŁıģ": 17473, + "ðŁıĥ": 16820, + "ðŁıĥ": 32751, + "ðŁıħ": 25500, + "ðŁıĨ": 9585, + "ðŁıĨ": 5596, + "ðŁıĨðŁıĨ": 18946, + "ðŁıĨðŁıĨ": 38269, + "ðŁıĨðŁıĨðŁıĨ": 44484, + "ðŁıĩ": 45789, + "ðŁıĩ": 40288, + "ðŁıĪ": 16144, + "ðŁıĪ": 10477, + "ðŁıī": 26020, + "ðŁıĬ": 33061, + "ðŁıĬ": 47830, + "ðŁıĮ": 41116, + "ðŁıı": 32460, + "ðŁıIJ": 46334, + "ðŁıIJ": 29433, + "ðŁıĴ": 37756, + "ðŁıŁ": 35914, + "ðŁıŁ": 26472, + "ðŁıŁï¸ı": 42627, + "ðŁıł": 33727, + "ðŁIJ": 2074, + "ðŁIJ¢": 37049, + "ðŁIJ£": 39597, + "ðŁIJ¥": 42981, + "ðŁIJ¦": 37260, + "ðŁIJ¬": 44238, + "ðŁIJ¯": 34825, + "ðŁIJ¯": 26111, + "ðŁIJ°": 35378, + "ðŁIJ°": 25050, + "ðŁIJ±": 35710, + "ðŁIJ±": 22979, + "ðŁIJ´": 33509, + "ðŁIJ¶": 14466, + "ðŁIJ¶": 10631, + "ðŁIJ·": 38408, + "ðŁIJ¸": 45597, + "ðŁIJ¸": 40298, + "ðŁIJº": 44281, + "ðŁIJº": 31445, + "ðŁIJ»": 30750, + "ðŁIJ»": 25322, + "ðŁIJ¼": 46234, + "ðŁIJ¾": 16057, + "ðŁIJ¾": 11317, + "ðŁIJ¾ðŁIJ¾": 42202, + "ðŁIJī": 46908, + "ðŁIJĬ": 43974, + "ðŁIJį": 48903, + "ðŁIJį": 30177, + "ðŁIJİ": 48281, + "ðŁIJİ": 32726, + "ðŁIJIJ": 47735, + "ðŁIJIJ": 27954, + "ðŁIJij": 49389, + "ðŁIJķ": 41069, + "ðŁIJĺ": 38733, + "ðŁIJĿ": 30619, + "ðŁIJĿ": 20111, + "ðŁIJŁ": 42084, + "ðŁIJŁ": 29989, + "ðŁIJł": 42725, + "ðŁij": 964, + "ðŁij£": 39755, + "ðŁij§": 48938, + "ðŁij¨": 18966, + "ðŁij¨âĢį": 25023, + "ðŁij©": 18800, + "ðŁij©âĢį": 26304, + "ðŁij«": 47106, + "ðŁij«": 35457, + "ðŁij®": 42686, + "ðŁij¯": 25910, + "ðŁij¯": 20582, + "ðŁij¶": 26187, + "ðŁij¶": 33189, + "ðŁij¸": 26268, + "ðŁij¸": 36645, + "ðŁij¹": 46766, + "ðŁij»": 24625, + "ðŁij»": 16243, + "ðŁij¼": 25270, + "ðŁij¼": 31083, + "ðŁij½": 42677, + "ðŁij½": 26257, + "ðŁijĢ": 11524, + "ðŁijĢ": 5908, + "ðŁijĢðŁijĢ": 31561, + "ðŁijģ": 47796, + "ðŁijģ": 45705, + "ðŁijĦ": 47445, + "ðŁijħ": 31833, + "ðŁijħ": 24672, + "ðŁijĨ": 42975, + "ðŁijĨ": 45194, + "ðŁijĩ": 7662, 
+ "ðŁijĩ": 7475, + "ðŁijĩðŁı»": 45811, + "ðŁijĩðŁı»": 32813, + "ðŁijĩðŁı¼": 37504, + "ðŁijĩðŁijĩ": 17915, + "ðŁijĩðŁijĩ": 31891, + "ðŁijĩðŁijĩðŁijĩ": 35627, + "ðŁijĪ": 32794, + "ðŁijĪ": 20832, + "ðŁijī": 9477, + "ðŁijī": 3988, + "ðŁijīðŁı»": 23481, + "ðŁijīðŁı¼": 27534, + "ðŁijīðŁı½": 38059, + "ðŁijīðŁijī": 41480, + "ðŁijĬ": 8897, + "ðŁijĬ": 9704, + "ðŁijĬðŁı»": 47393, + "ðŁijĬðŁı»": 29152, + "ðŁijĬðŁı¼": 49000, + "ðŁijĬðŁı¼": 30115, + "ðŁijĬðŁijĬ": 46521, + "ðŁijĭ": 19351, + "ðŁijĭ": 17686, + "ðŁijĮ": 4890, + "ðŁijĮ": 4494, + "ðŁijĮðŁı»": 31818, + "ðŁijĮðŁı»": 18606, + "ðŁijĮðŁı¼": 37655, + "ðŁijĮðŁı¼": 20031, + "ðŁijĮðŁı½": 35834, + "ðŁijĮðŁijĮ": 36139, + "ðŁijĮðŁijĮ": 21435, + "ðŁijĮðŁijĮðŁijĮ": 40876, + "ðŁijį": 4686, + "ðŁijį": 4201, + "ðŁijįðŁı»": 25803, + "ðŁijįðŁı»": 15129, + "ðŁijįðŁı¼": 37285, + "ðŁijįðŁı¼": 19689, + "ðŁijįðŁı½": 43722, + "ðŁijįðŁijį": 33012, + "ðŁijįðŁijį": 18997, + "ðŁijįðŁijįðŁijį": 37284, + "ðŁijİ": 39702, + "ðŁijİ": 32568, + "ðŁijı": 3802, + "ðŁijı": 4829, + "ðŁijıðŁı»": 19236, + "ðŁijıðŁı»": 17029, + "ðŁijıðŁı»ðŁijıðŁı»": 35254, + "ðŁijıðŁı¼": 24496, + "ðŁijıðŁı¼": 19979, + "ðŁijıðŁı¼ðŁijıðŁı¼": 46712, + "ðŁijıðŁı½": 40796, + "ðŁijıðŁı½": 33978, + "ðŁijıðŁı¾": 45450, + "ðŁijıðŁijı": 10356, + "ðŁijıðŁijı": 16706, + "ðŁijıðŁijıðŁijı": 17254, + "ðŁijIJ": 40877, + "ðŁijij": 14955, + "ðŁijij": 8717, + "ðŁijijðŁijij": 48532, + "ðŁijķ": 47865, + "ðŁijŁ": 41183, + "ðŁijł": 41264, + "ðŁijŃ": 34175, + "ðŁijŃ": 27943, + "ðŁĴ": 837, + "ðŁĴ¡": 24081, + "ðŁĴ£": 36862, + "ðŁĴ£": 29006, + "ðŁĴ¤": 34706, + "ðŁĴ¤": 25632, + "ðŁĴ¥": 12209, + "ðŁĴ¥": 7347, + "ðŁĴ¥ðŁĴ¥": 27396, + "ðŁĴ¥ðŁĴ¥": 39246, + "ðŁĴ¥ðŁĴ¥ðŁĴ¥": 48890, + "ðŁĴ¦": 21180, + "ðŁĴ¦": 14060, + "ðŁĴ¦ðŁĴ¦": 44469, + "ðŁĴ§": 34095, + "ðŁĴ¨": 27408, + "ðŁĴ¨": 17891, + "ðŁĴ©": 48621, + "ðŁĴ©": 28847, + "ðŁĴª": 5475, + "ðŁĴª": 6440, + "ðŁĴªðŁı»": 31669, + "ðŁĴªðŁı»": 21903, + "ðŁĴªðŁı¼": 32041, + "ðŁĴªðŁı¼": 20759, + "ðŁĴªðŁı½": 46380, + "ðŁĴªðŁı½": 31111, + "ðŁĴªðŁı¾": 39398, + "ðŁĴªðŁĴª": 24747, + "ðŁĴªðŁĴªðŁĴª": 39913, + "ðŁĴ«": 25770, + "ðŁĴ«": 12526, + "ðŁĴ¬": 30947, + "ðŁĴ¯": 10611, + "ðŁĴ¯": 7018, + "ðŁĴ¯ðŁĴ¯": 30234, + "ðŁĴ¯ðŁĴ¯": 44070, + "ðŁĴ°": 20454, + "ðŁĴ°": 14078, + "ðŁĴ°ðŁĴ°": 41747, + "ðŁĴµ": 47412, + "ðŁĴµ": 38041, + "ðŁĴ¸": 37696, + "ðŁĴ¸": 25957, + "ðŁĴ»": 33433, + "ðŁĴ»": 18135, + "ðŁĴ¿": 39541, + "ðŁĴĢ": 14888, + "ðŁĴĢ": 12158, + "ðŁĴĢðŁĴĢ": 30884, + "ðŁĴģ": 13997, + "ðŁĴģ": 14392, + "ðŁĴĥ": 9947, + "ðŁĴĥ": 14333, + "ðŁĴĥðŁı»": 38624, + "ðŁĴĥðŁĴĥ": 28041, + "ðŁĴĦ": 46116, + "ðŁĴĦ": 34571, + "ðŁĴħ": 27457, + "ðŁĴħ": 32414, + "ðŁĴī": 44316, + "ðŁĴī": 30503, + "ðŁĴĭ": 12217, + "ðŁĴĭ": 7417, + "ðŁĴĭðŁĴĭ": 29214, + "ðŁĴĮ": 40817, + "ðŁĴį": 35850, + "ðŁĴį": 24898, + "ðŁĴİ": 25938, + "ðŁĴİ": 15874, + "ðŁĴIJ": 27375, + "ðŁĴIJ": 20554, + "ðŁĴij": 49404, + "ðŁĴĵ": 20628, + "ðŁĴĵ": 12568, + "ðŁĴĵðŁĴĵ": 43505, + "ðŁĴĶ": 18880, + "ðŁĴĶ": 10704, + "ðŁĴĶðŁĴĶ": 44673, + "ðŁĴķ": 5412, + "ðŁĴķ": 3082, + "ðŁĴķðŁĴķ": 23106, + "ðŁĴķðŁĴķ": 14117, + "ðŁĴķðŁĴķðŁĴķ": 26772, + "ðŁĴĸ": 8466, + "ðŁĴĸ": 5582, + "ðŁĴĸðŁĴĸ": 19562, + "ðŁĴĸðŁĴĸ": 30595, + "ðŁĴĸðŁĴĸðŁĴĸ": 33915, + "ðŁĴĹ": 10148, + "ðŁĴĹ": 6690, + "ðŁĴĹðŁĴĹ": 47158, + "ðŁĴĹðŁĴĹ": 24064, + "ðŁĴĹðŁĴĹðŁĴĹ": 36990, + "ðŁĴĺ": 18223, + "ðŁĴĺ": 10816, + "ðŁĴĺðŁĴĺ": 40464, + "ðŁĴĻ": 5305, + "ðŁĴĻ": 4074, + "ðŁĴĻðŁĴĻ": 17833, + "ðŁĴĻðŁĴĻ": 27101, + "ðŁĴĻðŁĴĻðŁĴĻ": 30698, + "ðŁĴĻðŁĴĽ": 46804, + "ðŁĴĻðŁĴĽ": 26230, + "ðŁĴĻðŁĴľ": 47931, + "ðŁĴĻðŁĴľ": 42541, + "ðŁĴļ": 8102, + "ðŁĴļ": 6521, + "ðŁĴļðŁĴļ": 27497, + "ðŁĴļðŁĴļ": 46209, + "ðŁĴļðŁĴļðŁĴļ": 46182, + "ðŁĴļðŁĴĽ": 41232, + 
"ðŁĴĽ": 8221, + "ðŁĴĽ": 6233, + "ðŁĴĽðŁĴĻ": 36337, + "ðŁĴĽðŁĴļ": 37994, + "ðŁĴĽðŁĴĽ": 32420, + "ðŁĴľ": 6832, + "ðŁĴľ": 4882, + "ðŁĴľðŁĴľ": 17280, + "ðŁĴľðŁĴľ": 28211, + "ðŁĴľðŁĴľðŁĴľ": 31004, + "ðŁĴĿ": 36761, + "ðŁĴĿ": 22002, + "ðŁĴŀ": 14862, + "ðŁĴŀ": 8988, + "ðŁĴŀðŁĴŀ": 36448, + "ðŁĴŁ": 49394, + "ðŁĴŁ": 28828, + "ðŁĴŃ": 33848, + "ðŁĵ": 1497, + "ðŁĵ¢": 46560, + "ðŁĵ¢": 20901, + "ðŁĵ£": 48841, + "ðŁĵ£": 21282, + "ðŁĵ°:": 28952, + "ðŁĵ°": 14985, + "ðŁĵ±": 36104, + "ðŁĵ±": 20824, + "ðŁĵ²": 19363, + "ðŁĵ·": 6966, + "ðŁĵ·:": 8294, + "ðŁĵ·": 5551, + "ðŁĵ·@": 40032, + "ðŁĵ¸": 8401, + "ðŁĵ¸:": 10379, + "ðŁĵ¸": 6074, + "ðŁĵ¸@": 39660, + "ðŁĵ¹": 49251, + "ðŁĵº": 21792, + "ðŁĵº:": 29728, + "ðŁĵº": 10450, + "ðŁĵ»": 32711, + "ðŁĵ»": 15882, + "ðŁĵ½": 45361, + "ðŁĵħ": 21277, + "ðŁĵĨ": 23471, + "ðŁĵĪ": 23359, + "ðŁĵĬ": 22244, + "ðŁĵĭ": 46351, + "ðŁĵĮ": 22289, + "ðŁĵį": 25043, + "ðŁĵį:": 36845, + "ðŁĵį": 8903, + "ðŁĵĸ": 49003, + "ðŁĵĸ": 23043, + "ðŁĵļ": 25433, + "ðŁĵļ": 15566, + "ðŁĵĿ": 31888, + "ðŁĵĿ:": 48398, + "ðŁĵĿ": 15853, + "ðŁĵŀ": 24022, + "ðŁĶ": 1428, + "ðŁĶ¥": 3191, + "ðŁĶ¥#": 44354, + "ðŁĶ¥": 3016, + "ðŁĶ¥ðŁĶ¥": 5692, + "ðŁĶ¥ðŁĶ¥": 11771, + "ðŁĶ¥ðŁĶ¥ðŁĶ¥": 11004, + "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 23408, + "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 30989, + "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 48401, + "ðŁĶ¥ðŁĶĹ": 35130, + "ðŁĶª": 47078, + "ðŁĶª": 34545, + "ðŁĶ«": 38116, + "ðŁĶ«": 20583, + "ðŁĶ¬": 44227, + "ðŁĶ®": 38077, + "ðŁĶ´": 12408, + "ðŁĶ´": 10854, + "ðŁĶ´âļªï¸ı": 46879, + "ðŁĶ´âļªï¸ı": 40055, + "ðŁĶµ": 17531, + "ðŁĶµ": 17193, + "ðŁĶµâļªï¸ı": 42412, + "ðŁĶ¶": 42880, + "ðŁĶ¶": 36222, + "ðŁĶ·": 37740, + "ðŁĶ¸": 24200, + "ðŁĶ¹": 19995, + "ðŁĶº": 45561, + "ðŁĶģ": 41299, + "ðŁĶĬ": 32580, + "ðŁĶĬ": 20502, + "ðŁĶİ": 44935, + "ðŁĶij": 35127, + "ðŁĶĴ": 44972, + "ðŁĶĶ": 45753, + "ðŁĶĹ": 47475, + "ðŁĶĹ": 14561, + "ðŁĶĺ": 38995, + "ðŁĶľ": 36011, + "ðŁĶĿ": 44387, + "ðŁĶĿ": 29506, + "ðŁķ": 7692, + "ðŁķº": 33958, + "ðŁķĬ": 42624, + "ðŁķĬ": 37760, + "ðŁĸ": 6269, + "ðŁĸ¤": 17603, + "ðŁĸ¤": 10860, + "ðŁĸ¥": 47990, + "ðŁĹ": 7045, + "ðŁĹ£": 33232, + "ðŁĹ£": 18583, + "ðŁĹ£ï¸ı": 37476, + "ðŁĹĵ": 34335, + "ðŁĹĵ": 28773, + "ðŁĹĵï¸ı": 39847, + "ðŁĺ": 668, + "ðŁĺ¡": 21968, + "ðŁĺ¡": 17452, + "ðŁĺ¡ðŁĺ¡": 37223, + "ðŁĺ¢": 14308, + "ðŁĺ¢": 9925, + "ðŁĺ¢ðŁĺ¢": 32923, + "ðŁĺ¢ðŁĺ¢": 47921, + "ðŁĺ£": 32718, + "ðŁĺ¤": 26872, + "ðŁĺ¤": 20740, + "ðŁĺ¥": 38383, + "ðŁĺ¥": 23951, + "ðŁĺ¨": 38080, + "ðŁĺ©": 9051, + "ðŁĺ©": 9494, + "ðŁĺ©ðŁĺ©": 22820, + "ðŁĺ©ðŁĺ©": 38031, + "ðŁĺ©ðŁĺ©ðŁĺ©": 49063, + "ðŁĺª": 38181, + "ðŁĺª": 22243, + "ðŁĺ«": 25141, + "ðŁĺ«": 22340, + "ðŁĺ¬": 23704, + "ðŁĺ¬": 14549, + "ðŁĺ®": 40163, + "ðŁĺ®": 21616, + "ðŁĺ¯": 37858, + "ðŁĺ°": 34728, + "ðŁĺ±": 10938, + "ðŁĺ±": 9055, + "ðŁĺ±ðŁĺ±": 22061, + "ðŁĺ±ðŁĺ±": 40767, + "ðŁĺ±ðŁĺ±ðŁĺ±": 40909, + "ðŁĺ²": 40460, + "ðŁĺ²": 24620, + "ðŁĺ³": 12047, + "ðŁĺ³": 8223, + "ðŁĺ³ðŁĺ³": 32592, + "ðŁĺ´": 23527, + "ðŁĺ´": 16415, + "ðŁĺ´ðŁĺ´": 49307, + "ðŁĺµ": 39368, + "ðŁĺ¶": 35207, + "ðŁĺ·": 37943, + "ðŁĺ·": 25759, + "ðŁĺ¸": 36912, + "ðŁĺ¹": 26477, + "ðŁĺ¹": 26573, + "ðŁĺ¹ðŁĺ¹": 46287, + "ðŁĺº": 40613, + "ðŁĺ»": 15453, + "ðŁĺ»": 12911, + "ðŁĺ»ðŁĺ»": 34414, + "ðŁĺ¼": 44245, + "ðŁĺ½": 45156, + "ðŁĺĢ": 12832, + "ðŁĺĢ": 7334, + "ðŁĺĢðŁĺĢ": 34503, + "ðŁĺģ": 6967, + "ðŁĺģ": 4821, + "ðŁĺģðŁĺģ": 37900, + "ðŁĺģðŁĺģ": 19213, + "ðŁĺģðŁĺģðŁĺģ": 29083, + "ðŁĺĤ": 1424, + "ðŁĺĤ)": 42643, + "ðŁĺĤ.": 42550, + "ðŁĺĤ": 1558, + "ðŁĺĤâĿ¤ï¸ı": 36412, + "ðŁĺĤðŁijĮ": 42000, + "ðŁĺĤðŁĺĤ": 2286, + "ðŁĺĤðŁĺĤ": 4112, + "ðŁĺĤðŁĺĤðŁĺĤ": 22233, + "ðŁĺĤðŁĺĤðŁĺĤ": 4887, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 9936, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 11522, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 19295, 
+ "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 33415, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 48973, + "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 28504, + "ðŁĺĤðŁĺį": 43128, + "ðŁĺĤðŁĺŃ": 28965, + "ðŁĺĤðŁĺŃ": 25802, + "ðŁĺĥ": 14079, + "ðŁĺĥ": 8520, + "ðŁĺĥðŁĺĥ": 38358, + "ðŁĺĦ": 12141, + "ðŁĺĦ": 7624, + "ðŁĺĦðŁĺĦ": 32312, + "ðŁĺħ": 15245, + "ðŁĺħ": 9188, + "ðŁĺħðŁĺħ": 39078, + "ðŁĺĨ": 16541, + "ðŁĺĨ": 10943, + "ðŁĺĨðŁĺĨ": 39503, + "ðŁĺĩ": 21694, + "ðŁĺĩ": 13091, + "ðŁĺĪ": 14377, + "ðŁĺĪ": 9756, + "ðŁĺĪðŁĺĪ": 44473, + "ðŁĺī": 9740, + "ðŁĺī": 4955, + "ðŁĺīðŁĺī": 40430, + "ðŁĺĬ": 4692, + "ðŁĺĬ": 3020, + "ðŁĺĬâĿ¤ï¸ı": 43606, + "ðŁĺĬðŁĺĬ": 12838, + "ðŁĺĬðŁĺĬ": 20842, + "ðŁĺĬðŁĺĬðŁĺĬ": 28685, + "ðŁĺĬðŁĺĬðŁĺĬðŁĺĬ": 35519, + "ðŁĺĭ": 12391, + "ðŁĺĭ": 7203, + "ðŁĺĭðŁĺĭ": 33304, + "ðŁĺĮ": 19221, + "ðŁĺĮ": 12163, + "ðŁĺį": 1796, + "ðŁĺį#": 42357, + "ðŁĺį.": 48579, + "ðŁĺį": 1754, + "ðŁĺįâĿ¤": 29122, + "ðŁĺįâĿ¤ï¸ı": 21945, + "ðŁĺįðŁijĮ": 41005, + "ðŁĺįðŁĴķ": 35946, + "ðŁĺįðŁĶ¥": 46648, + "ðŁĺįðŁĺĤ": 48715, + "ðŁĺįðŁĺį": 3663, + "ðŁĺįðŁĺį": 6471, + "ðŁĺįðŁĺįðŁĺį": 30614, + "ðŁĺįðŁĺįðŁĺį": 7703, + "ðŁĺįðŁĺįðŁĺįðŁĺį": 16603, + "ðŁĺįðŁĺįðŁĺįðŁĺį": 18925, + "ðŁĺįðŁĺįðŁĺįðŁĺįðŁĺį": 32078, + "ðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺį": 48683, + "ðŁĺįðŁĺĺ": 29646, + "ðŁĺįðŁĺĺ": 19849, + "ðŁĺįðŁĺŃ": 39555, + "ðŁĺİ": 7426, + "ðŁĺİ": 4345, + "ðŁĺİðŁĺİ": 24048, + "ðŁĺİðŁĺİðŁĺİ": 39742, + "ðŁĺı": 11624, + "ðŁĺı": 6909, + "ðŁĺıðŁĺı": 38151, + "ðŁĺIJ": 38586, + "ðŁĺIJ": 19618, + "ðŁĺij": 32469, + "ðŁĺij": 18937, + "ðŁĺĴ": 20792, + "ðŁĺĴ": 11702, + "ðŁĺĵ": 28733, + "ðŁĺĶ": 19532, + "ðŁĺĶ": 11432, + "ðŁĺķ": 45741, + "ðŁĺķ": 20602, + "ðŁĺĸ": 35006, + "ðŁĺĺ": 4240, + "ðŁĺĺ": 3352, + "ðŁĺĺâĿ¤": 48409, + "ðŁĺĺâĿ¤ï¸ı": 39150, + "ðŁĺĺðŁĺį": 38176, + "ðŁĺĺðŁĺĺ": 15663, + "ðŁĺĺðŁĺĺ": 10507, + "ðŁĺĺðŁĺĺðŁĺĺ": 20208, + "ðŁĺĺðŁĺĺðŁĺĺðŁĺĺ": 44892, + "ðŁĺĻ": 36201, + "ðŁĺĻ": 29209, + "ðŁĺļ": 24897, + "ðŁĺļ": 19102, + "ðŁĺĽ": 24550, + "ðŁĺĽ": 15745, + "ðŁĺľ": 13226, + "ðŁĺľ": 7830, + "ðŁĺľðŁĺľ": 43065, + "ðŁĺĿ": 20064, + "ðŁĺĿ": 12970, + "ðŁĺŀ": 40458, + "ðŁĺŀ": 21103, + "ðŁĺŁ": 46947, + "ðŁĺł": 34094, + "ðŁĺŃ": 2962, + "ðŁĺŃ": 3915, + "ðŁĺŃâĿ¤ï¸ı": 29567, + "ðŁĺŃðŁĴķ": 46306, + "ðŁĺŃðŁĺĤ": 38505, + "ðŁĺŃðŁĺį": 36893, + "ðŁĺŃðŁĺŃ": 5300, + "ðŁĺŃðŁĺŃ": 11834, + "ðŁĺŃðŁĺŃðŁĺŃ": 44089, + "ðŁĺŃðŁĺŃðŁĺŃ": 13116, + "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 19793, + "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 27322, + "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 43366, + "ðŁĻ": 1478, + "ðŁĻĢ": 43092, + "ðŁĻĤ": 32006, + "ðŁĻĤ": 14860, + "ðŁĻĥ": 27222, + "ðŁĻĥ": 15652, + "ðŁĻĦ": 20648, + "ðŁĻĦ": 13049, + "ðŁĻħ": 42702, + "ðŁĻĨ": 30050, + "ðŁĻĨ": 35730, + "ðŁĻĪ": 12661, + "ðŁĻĪ": 9516, + "ðŁĻĪðŁĻĪ": 41796, + "ðŁĻĬ": 23684, + "ðŁĻĬ": 16636, + "ðŁĻĭ": 19193, + "ðŁĻĭ": 30274, + "ðŁĻĮ": 4366, + "ðŁĻĮ": 4855, + "ðŁĻĮðŁı»": 26756, + "ðŁĻĮðŁı»": 15799, + "ðŁĻĮðŁı¼": 26584, + "ðŁĻĮðŁı¼": 15364, + "ðŁĻĮðŁı½": 36660, + "ðŁĻĮðŁı½": 22962, + "ðŁĻĮðŁı¾": 38023, + "ðŁĻĮðŁı¾": 26466, + "ðŁĻĮðŁĻĮ": 21202, + "ðŁĻĮðŁĻĮ": 30430, + "ðŁĻĮðŁĻĮðŁĻĮ": 37127, + "ðŁĻı": 4260, + "ðŁĻı": 5503, + "ðŁĻıðŁı»": 25100, + "ðŁĻıðŁı»": 16650, + "ðŁĻıðŁı¼": 31163, + "ðŁĻıðŁı¼": 18952, + "ðŁĻıðŁı½": 34103, + "ðŁĻıðŁı½": 21540, + "ðŁĻıðŁı¾": 34277, + "ðŁĻıðŁı¾": 21979, + "ðŁĻıðŁĻı": 18227, + "ðŁĻıðŁĻı": 26510, + "ðŁĻıðŁĻıðŁĻı": 31702, + "ðŁļ": 2730, + "ðŁļ¨": 12198, + "ðŁļ¨": 6056, + "ðŁļ¨ðŁļ¨": 36487, + "ðŁļ¨ðŁļ¨": 21440, + "ðŁļ¨ðŁļ¨ðŁļ¨": 41515, + "ðŁļ©": 44514, + "ðŁļ«": 35291, + "ðŁļ²": 37085, + "ðŁļ´": 30825, + "ðŁļ¶": 46060, + "ðŁļĢ": 22400, + "ðŁļĢ": 13542, + "ðŁļĢðŁļĢ": 49033, + "ðŁļĤ": 38949, + "ðŁļĮ": 46891, + "ðŁļĹ": 33054, + "ðŁļĹ": 22783, + "ðŁļĺ": 35825, + "ðŁļĻ": 48487, 
+ "ðŁĽ": 11306, + "ñ": 173, + "ñ": 429, + "ò": 174, + "ò": 430, + "ó": 175, + "ó": 431, + "ô": 176, + "ô": 432, + "õ": 177, + "õ": 433, + "ö": 178, + "ö": 434, + "÷": 179, + "÷": 435, + "ø": 180, + "ø": 436, + "ù": 181, + "ù": 437, + "ú": 182, + "ú": 438, + "û": 183, + "û": 439, + "ü": 184, + "ü": 440, + "ý": 185, + "ý": 441, + "þ": 186, + "þ": 442, + "ÿ": 187, + "ÿ": 443, + "Ā": 188, + "Ā": 444, + "ā": 189, + "ā": 445, + "Ă": 190, + "Ă": 446, + "ă": 191, + "ă": 447, + "Ą": 192, + "Ą": 448, + "ą": 193, + "ą": 449, + "Ć": 194, + "Ć": 450, + "ć": 195, + "ć": 451, + "Ĉ": 196, + "Ĉ": 452, + "ĉ": 197, + "ĉ": 453, + "Ċ": 198, + "Ċ": 454, + "ċ": 199, + "ċ": 455, + "Č": 200, + "Č": 456, + "č": 201, + "č": 457, + "Ď": 202, + "Ď": 458, + "ď": 203, + "ď": 459, + "Đ": 204, + "Đ": 460, + "đ": 205, + "đ": 461, + "Ē": 206, + "Ē": 462, + "ē": 207, + "ē": 463, + "Ĕ": 208, + "Ĕ": 464, + "ĕ": 209, + "ĕ": 465, + "Ė": 210, + "Ė": 466, + "ė": 211, + "ė": 467, + "Ę": 212, + "Ę": 468, + "ę": 213, + "ę": 469, + "Ě": 214, + "Ě": 470, + "ě": 215, + "ě": 471, + "Ĝ": 216, + "Ĝ": 472, + "ĝ": 217, + "ĝ": 473, + "Ğ": 218, + "Ğ": 474, + "ğ": 219, + "ğ": 475, + "Ġ": 220, + "Ġ": 476, + "ġ": 221, + "ġ": 477, + "Ģ": 222, + "Ģ": 478, + "Ģï¸ı": 9668, + "Ģï¸ı": 5511, + "ģ": 223, + "ģ": 479, + "ģà¸": 15016, + "Ĥ": 224, + "Ĥ": 480, + "Ĥâĸ": 29036, + "ĤâĸĤâĸ": 30832, + "ĥ": 225, + "ĥ": 481, + "Ħ": 226, + "Ħ": 482, + "Ħà¸": 20537, + "Ħë": 34462, + "Ħëĭ": 25170, + "ħ": 227, + "ħ": 483, + "ħï¸ı": 33950, + "Ĩ": 228, + "Ĩ": 484, + "ĩ": 229, + "ĩ": 485, + "Ī": 230, + "Ī": 486, + "ī": 231, + "ī": 487, + "īï¸ı": 37463, + "Ĭ": 232, + "Ĭ": 488, + "Ĭãģ": 30294, + "ĭ": 233, + "ĭ": 489, + "ĭãģ": 36218, + "ĭãĤ": 45737, + "Į": 234, + "Į": 490, + "ĮãĤĬãģ": 45969, + "ĮãĤĬãģŁãģĦ": 47021, + "Įë": 17003, + "į": 235, + "į": 491, + "İ": 236, + "İ": 492, + "ı": 237, + "ı": 493, + "IJ": 238, + "IJ": 494, + "ij": 239, + "ij": 495, + "Ĵ": 240, + "Ĵ": 496, + "ĵ": 241, + "ĵ": 497, + "Ķ": 242, + "Ķ": 498, + "Ķë": 37978, + "Ķï¸ı": 24395, + "Ķï¸ı": 7443, + "ķ": 243, + "ķ": 499, + "ķãĤ": 26609, + "ķï¸ı": 44853, + "ĸ": 244, + "ĸ": 500, + "ĸï¸ı": 28877, + "Ĺ": 245, + "Ĺ": 501, + "ĺ": 246, + "ĺ": 502, + "Ļ": 247, + "Ļ": 503, + "ļ": 248, + "ļ": 504, + "Ľ": 249, + "Ľ": 505, + "ľ": 250, + "ľ": 506, + "ľë": 39810, + "Ŀ": 251, + "Ŀ": 507, + "ŀ": 252, + "ŀ": 508, + "Ł": 253, + "Ł": 509, + "ŁãģĦ": 46023, + "ł": 254, + "ł": 510, + "łï¸ı": 27899, + "łï¸ı": 12715, + "łĪ": 43364, + "Ń": 255, + "Ń": 511 +} diff --git a/ldm_patched/modules/sd2_clip.py b/ldm_patched/modules/sd2_clip.py new file mode 100644 index 0000000000000000000000000000000000000000..41f9e388dc40966e7d68a1cba7d7df11a05983c7 --- /dev/null +++ b/ldm_patched/modules/sd2_clip.py @@ -0,0 +1,24 @@ +from ldm_patched.modules import sd1_clip +import torch +import os + +class SD2ClipHModel(sd1_clip.SDClipModel): + def __init__(self, arch="ViT-H-14", device="cpu", max_length=77, freeze=True, layer="penultimate", layer_idx=None, dtype=None): + if layer == "penultimate": + layer="hidden" + layer_idx=-2 + + textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "sd2_clip_config.json") + super().__init__(device=device, freeze=freeze, layer=layer, layer_idx=layer_idx, textmodel_json_config=textmodel_json_config, dtype=dtype, special_tokens={"start": 49406, "end": 49407, "pad": 0}) + +class SD2ClipHTokenizer(sd1_clip.SDTokenizer): + def __init__(self, tokenizer_path=None, embedding_directory=None): + super().__init__(tokenizer_path, pad_with_end=False, embedding_directory=embedding_directory, 
embedding_size=1024) + +class SD2Tokenizer(sd1_clip.SD1Tokenizer): + def __init__(self, embedding_directory=None): + super().__init__(embedding_directory=embedding_directory, clip_name="h", tokenizer=SD2ClipHTokenizer) + +class SD2ClipModel(sd1_clip.SD1ClipModel): + def __init__(self, device="cpu", dtype=None, **kwargs): + super().__init__(device=device, dtype=dtype, clip_name="h", clip_model=SD2ClipHModel, **kwargs) diff --git a/ldm_patched/modules/sd2_clip_config.json b/ldm_patched/modules/sd2_clip_config.json new file mode 100644 index 0000000000000000000000000000000000000000..85cec832be9a1d0957245a8d125af398829f247e --- /dev/null +++ b/ldm_patched/modules/sd2_clip_config.json @@ -0,0 +1,23 @@ +{ + "architectures": [ + "CLIPTextModel" + ], + "attention_dropout": 0.0, + "bos_token_id": 0, + "dropout": 0.0, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_size": 1024, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 4096, + "layer_norm_eps": 1e-05, + "max_position_embeddings": 77, + "model_type": "clip_text_model", + "num_attention_heads": 16, + "num_hidden_layers": 24, + "pad_token_id": 1, + "projection_dim": 1024, + "torch_dtype": "float32", + "vocab_size": 49408 +} diff --git a/ldm_patched/modules/sdxl_clip.py b/ldm_patched/modules/sdxl_clip.py new file mode 100644 index 0000000000000000000000000000000000000000..9d3d83d82314ac116943601e07eb4bad7b2adb56 --- /dev/null +++ b/ldm_patched/modules/sdxl_clip.py @@ -0,0 +1,66 @@ +from ldm_patched.modules import sd1_clip +import torch +import os + +class SDXLClipG(sd1_clip.SDClipModel): + def __init__(self, device="cpu", max_length=77, freeze=True, layer="penultimate", layer_idx=None, dtype=None): + if layer == "penultimate": + layer="hidden" + layer_idx=-2 + + textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_config_bigg.json") + super().__init__(device=device, freeze=freeze, layer=layer, layer_idx=layer_idx, textmodel_json_config=textmodel_json_config, dtype=dtype, + special_tokens={"start": 49406, "end": 49407, "pad": 0}, layer_norm_hidden_state=False) + + def load_sd(self, sd): + return super().load_sd(sd) + +class SDXLClipGTokenizer(sd1_clip.SDTokenizer): + def __init__(self, tokenizer_path=None, embedding_directory=None): + super().__init__(tokenizer_path, pad_with_end=False, embedding_directory=embedding_directory, embedding_size=1280, embedding_key='clip_g') + + +class SDXLTokenizer: + def __init__(self, embedding_directory=None): + self.clip_l = sd1_clip.SDTokenizer(embedding_directory=embedding_directory) + self.clip_g = SDXLClipGTokenizer(embedding_directory=embedding_directory) + + def tokenize_with_weights(self, text:str, return_word_ids=False): + out = {} + out["g"] = self.clip_g.tokenize_with_weights(text, return_word_ids) + out["l"] = self.clip_l.tokenize_with_weights(text, return_word_ids) + return out + + def untokenize(self, token_weight_pair): + return self.clip_g.untokenize(token_weight_pair) + +class SDXLClipModel(torch.nn.Module): + def __init__(self, device="cpu", dtype=None): + super().__init__() + self.clip_l = sd1_clip.SDClipModel(layer="hidden", layer_idx=-2, device=device, dtype=dtype, layer_norm_hidden_state=False) + self.clip_g = SDXLClipG(device=device, dtype=dtype) + + def clip_layer(self, layer_idx): + self.clip_l.clip_layer(layer_idx) + self.clip_g.clip_layer(layer_idx) + + def reset_clip_layer(self): + self.clip_g.reset_clip_layer() + self.clip_l.reset_clip_layer() + + def encode_token_weights(self, token_weight_pairs): + 
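+        # both text encoders run on their own token streams below; their per-token hidden states are concatenated (l then g) and only CLIP-G's pooled output is returned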
token_weight_pairs_g = token_weight_pairs["g"] + token_weight_pairs_l = token_weight_pairs["l"] + g_out, g_pooled = self.clip_g.encode_token_weights(token_weight_pairs_g) + l_out, l_pooled = self.clip_l.encode_token_weights(token_weight_pairs_l) + return torch.cat([l_out, g_out], dim=-1), g_pooled + + def load_sd(self, sd): + if "text_model.encoder.layers.30.mlp.fc1.weight" in sd: + return self.clip_g.load_sd(sd) + else: + return self.clip_l.load_sd(sd) + +class SDXLRefinerClipModel(sd1_clip.SD1ClipModel): + def __init__(self, device="cpu", dtype=None): + super().__init__(device=device, dtype=dtype, clip_name="g", clip_model=SDXLClipG) diff --git a/ldm_patched/modules/supported_models.py b/ldm_patched/modules/supported_models.py new file mode 100644 index 0000000000000000000000000000000000000000..1d442d4dd9ca2c56b7bc92a0f142b7dabd600c5a --- /dev/null +++ b/ldm_patched/modules/supported_models.py @@ -0,0 +1,310 @@ +import torch +from . import model_base +from . import utils + +from . import sd1_clip +from . import sd2_clip +from . import sdxl_clip + +from . import supported_models_base +from . import latent_formats + +from . import diffusers_convert + +class SD15(supported_models_base.BASE): + unet_config = { + "context_dim": 768, + "model_channels": 320, + "use_linear_in_transformer": False, + "adm_in_channels": None, + "use_temporal_attention": False, + } + + unet_extra_config = { + "num_heads": 8, + "num_head_channels": -1, + } + + latent_format = latent_formats.SD15 + + def process_clip_state_dict(self, state_dict): + k = list(state_dict.keys()) + for x in k: + if x.startswith("cond_stage_model.transformer.") and not x.startswith("cond_stage_model.transformer.text_model."): + y = x.replace("cond_stage_model.transformer.", "cond_stage_model.transformer.text_model.") + state_dict[y] = state_dict.pop(x) + + if 'cond_stage_model.transformer.text_model.embeddings.position_ids' in state_dict: + ids = state_dict['cond_stage_model.transformer.text_model.embeddings.position_ids'] + if ids.dtype == torch.float32: + state_dict['cond_stage_model.transformer.text_model.embeddings.position_ids'] = ids.round() + + replace_prefix = {} + replace_prefix["cond_stage_model."] = "cond_stage_model.clip_l." + state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix) + return state_dict + + def process_clip_state_dict_for_saving(self, state_dict): + replace_prefix = {"clip_l.": "cond_stage_model."} + return utils.state_dict_prefix_replace(state_dict, replace_prefix) + + def clip_target(self): + return supported_models_base.ClipTarget(sd1_clip.SD1Tokenizer, sd1_clip.SD1ClipModel) + +class SD20(supported_models_base.BASE): + unet_config = { + "context_dim": 1024, + "model_channels": 320, + "use_linear_in_transformer": True, + "adm_in_channels": None, + "use_temporal_attention": False, + } + + latent_format = latent_formats.SD15 + + def model_type(self, state_dict, prefix=""): + if self.unet_config["in_channels"] == 4: #SD2.0 inpainting models are not v prediction + k = "{}output_blocks.11.1.transformer_blocks.0.norm1.bias".format(prefix) + out = state_dict[k] + if torch.std(out, unbiased=False) > 0.09: # not sure how well this will actually work. I guess we will find out. + return model_base.ModelType.V_PREDICTION + return model_base.ModelType.EPS + + def process_clip_state_dict(self, state_dict): + replace_prefix = {} + replace_prefix["conditioner.embedders.0.model."] = "cond_stage_model.model." 
#SD2 in sgm format + state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix) + + state_dict = utils.transformers_convert(state_dict, "cond_stage_model.model.", "cond_stage_model.clip_h.transformer.text_model.", 24) + return state_dict + + def process_clip_state_dict_for_saving(self, state_dict): + replace_prefix = {} + replace_prefix["clip_h"] = "cond_stage_model.model" + state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix) + state_dict = diffusers_convert.convert_text_enc_state_dict_v20(state_dict) + return state_dict + + def clip_target(self): + return supported_models_base.ClipTarget(sd2_clip.SD2Tokenizer, sd2_clip.SD2ClipModel) + +class SD21UnclipL(SD20): + unet_config = { + "context_dim": 1024, + "model_channels": 320, + "use_linear_in_transformer": True, + "adm_in_channels": 1536, + "use_temporal_attention": False, + } + + clip_vision_prefix = "embedder.model.visual." + noise_aug_config = {"noise_schedule_config": {"timesteps": 1000, "beta_schedule": "squaredcos_cap_v2"}, "timestep_dim": 768} + + +class SD21UnclipH(SD20): + unet_config = { + "context_dim": 1024, + "model_channels": 320, + "use_linear_in_transformer": True, + "adm_in_channels": 2048, + "use_temporal_attention": False, + } + + clip_vision_prefix = "embedder.model.visual." + noise_aug_config = {"noise_schedule_config": {"timesteps": 1000, "beta_schedule": "squaredcos_cap_v2"}, "timestep_dim": 1024} + +class SDXLRefiner(supported_models_base.BASE): + unet_config = { + "model_channels": 384, + "use_linear_in_transformer": True, + "context_dim": 1280, + "adm_in_channels": 2560, + "transformer_depth": [0, 0, 4, 4, 4, 4, 0, 0], + "use_temporal_attention": False, + } + + latent_format = latent_formats.SDXL + + def get_model(self, state_dict, prefix="", device=None): + return model_base.SDXLRefiner(self, device=device) + + def process_clip_state_dict(self, state_dict): + keys_to_replace = {} + replace_prefix = {} + + state_dict = utils.transformers_convert(state_dict, "conditioner.embedders.0.model.", "cond_stage_model.clip_g.transformer.text_model.", 32) + keys_to_replace["conditioner.embedders.0.model.text_projection"] = "cond_stage_model.clip_g.text_projection" + keys_to_replace["conditioner.embedders.0.model.logit_scale"] = "cond_stage_model.clip_g.logit_scale" + + state_dict = utils.state_dict_key_replace(state_dict, keys_to_replace) + return state_dict + + def process_clip_state_dict_for_saving(self, state_dict): + replace_prefix = {} + state_dict_g = diffusers_convert.convert_text_enc_state_dict_v20(state_dict, "clip_g") + if "clip_g.transformer.text_model.embeddings.position_ids" in state_dict_g: + state_dict_g.pop("clip_g.transformer.text_model.embeddings.position_ids") + replace_prefix["clip_g"] = "conditioner.embedders.0.model" + state_dict_g = utils.state_dict_prefix_replace(state_dict_g, replace_prefix) + return state_dict_g + + def clip_target(self): + return supported_models_base.ClipTarget(sdxl_clip.SDXLTokenizer, sdxl_clip.SDXLRefinerClipModel) + +class SDXL(supported_models_base.BASE): + unet_config = { + "model_channels": 320, + "use_linear_in_transformer": True, + "transformer_depth": [0, 0, 2, 2, 10, 10], + "context_dim": 2048, + "adm_in_channels": 2816, + "use_temporal_attention": False, + } + + latent_format = latent_formats.SDXL + + def model_type(self, state_dict, prefix=""): + if "v_pred" in state_dict: + return model_base.ModelType.V_PREDICTION + else: + return model_base.ModelType.EPS + + def get_model(self, state_dict, prefix="", device=None): + out = 
model_base.SDXL(self, model_type=self.model_type(state_dict, prefix), device=device) + if self.inpaint_model(): + out.set_inpaint() + return out + + def process_clip_state_dict(self, state_dict): + keys_to_replace = {} + replace_prefix = {} + + replace_prefix["conditioner.embedders.0.transformer.text_model"] = "cond_stage_model.clip_l.transformer.text_model" + state_dict = utils.transformers_convert(state_dict, "conditioner.embedders.1.model.", "cond_stage_model.clip_g.transformer.text_model.", 32) + keys_to_replace["conditioner.embedders.1.model.text_projection"] = "cond_stage_model.clip_g.text_projection" + keys_to_replace["conditioner.embedders.1.model.text_projection.weight"] = "cond_stage_model.clip_g.text_projection" + keys_to_replace["conditioner.embedders.1.model.logit_scale"] = "cond_stage_model.clip_g.logit_scale" + + state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix) + state_dict = utils.state_dict_key_replace(state_dict, keys_to_replace) + return state_dict + + def process_clip_state_dict_for_saving(self, state_dict): + replace_prefix = {} + keys_to_replace = {} + state_dict_g = diffusers_convert.convert_text_enc_state_dict_v20(state_dict, "clip_g") + if "clip_g.transformer.text_model.embeddings.position_ids" in state_dict_g: + state_dict_g.pop("clip_g.transformer.text_model.embeddings.position_ids") + for k in state_dict: + if k.startswith("clip_l"): + state_dict_g[k] = state_dict[k] + + replace_prefix["clip_g"] = "conditioner.embedders.1.model" + replace_prefix["clip_l"] = "conditioner.embedders.0" + state_dict_g = utils.state_dict_prefix_replace(state_dict_g, replace_prefix) + return state_dict_g + + def clip_target(self): + return supported_models_base.ClipTarget(sdxl_clip.SDXLTokenizer, sdxl_clip.SDXLClipModel) + +class SSD1B(SDXL): + unet_config = { + "model_channels": 320, + "use_linear_in_transformer": True, + "transformer_depth": [0, 0, 2, 2, 4, 4], + "context_dim": 2048, + "adm_in_channels": 2816, + "use_temporal_attention": False, + } + +class Segmind_Vega(SDXL): + unet_config = { + "model_channels": 320, + "use_linear_in_transformer": True, + "transformer_depth": [0, 0, 1, 1, 2, 2], + "context_dim": 2048, + "adm_in_channels": 2816, + "use_temporal_attention": False, + } + +class SVD_img2vid(supported_models_base.BASE): + unet_config = { + "model_channels": 320, + "in_channels": 8, + "use_linear_in_transformer": True, + "transformer_depth": [1, 1, 1, 1, 1, 1, 0, 0], + "context_dim": 1024, + "adm_in_channels": 768, + "use_temporal_attention": True, + "use_temporal_resblock": True + } + + clip_vision_prefix = "conditioner.embedders.0.open_clip.model.visual." + + latent_format = latent_formats.SD15 + + sampling_settings = {"sigma_max": 700.0, "sigma_min": 0.002} + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.SVD_img2vid(self, device=device) + return out + + def clip_target(self): + return None + +class Stable_Zero123(supported_models_base.BASE): + unet_config = { + "context_dim": 768, + "model_channels": 320, + "use_linear_in_transformer": False, + "adm_in_channels": None, + "use_temporal_attention": False, + "in_channels": 8, + } + + unet_extra_config = { + "num_heads": 8, + "num_head_channels": -1, + } + + clip_vision_prefix = "cond_stage_model.model.visual." 
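+    # Zero123 conditions on a CLIP-vision embedding of the input view; those weights are found under this prefix in the checkpoint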
+ + latent_format = latent_formats.SD15 + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.Stable_Zero123(self, device=device, cc_projection_weight=state_dict["cc_projection.weight"], cc_projection_bias=state_dict["cc_projection.bias"]) + return out + + def clip_target(self): + return None + +class SD_X4Upscaler(SD20): + unet_config = { + "context_dim": 1024, + "model_channels": 256, + 'in_channels': 7, + "use_linear_in_transformer": True, + "adm_in_channels": None, + "use_temporal_attention": False, + } + + unet_extra_config = { + "disable_self_attentions": [True, True, True, False], + "num_classes": 1000, + "num_heads": 8, + "num_head_channels": -1, + } + + latent_format = latent_formats.SD_X4 + + sampling_settings = { + "linear_start": 0.0001, + "linear_end": 0.02, + } + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.SD_X4Upscaler(self, device=device) + return out + +models = [Stable_Zero123, SD15, SD20, SD21UnclipL, SD21UnclipH, SDXLRefiner, SDXL, SSD1B, Segmind_Vega, SD_X4Upscaler] +models += [SVD_img2vid] diff --git a/ldm_patched/modules/supported_models_base.py b/ldm_patched/modules/supported_models_base.py new file mode 100644 index 0000000000000000000000000000000000000000..5baf4bca6c60ad5e449041e6ce72438811f81d20 --- /dev/null +++ b/ldm_patched/modules/supported_models_base.py @@ -0,0 +1,83 @@ +import torch +from . import model_base +from . import utils +from . import latent_formats + +class ClipTarget: + def __init__(self, tokenizer, clip): + self.clip = clip + self.tokenizer = tokenizer + self.params = {} + +class BASE: + unet_config = {} + unet_extra_config = { + "num_heads": -1, + "num_head_channels": 64, + } + + clip_prefix = [] + clip_vision_prefix = None + noise_aug_config = None + sampling_settings = {} + latent_format = latent_formats.LatentFormat + + manual_cast_dtype = None + + @classmethod + def matches(s, unet_config): + for k in s.unet_config: + if s.unet_config[k] != unet_config[k]: + return False + return True + + def model_type(self, state_dict, prefix=""): + return model_base.ModelType.EPS + + def inpaint_model(self): + return self.unet_config["in_channels"] > 4 + + def __init__(self, unet_config): + self.unet_config = unet_config + self.latent_format = self.latent_format() + for x in self.unet_extra_config: + self.unet_config[x] = self.unet_extra_config[x] + + def get_model(self, state_dict, prefix="", device=None): + if self.noise_aug_config is not None: + out = model_base.SD21UNCLIP(self, self.noise_aug_config, model_type=self.model_type(state_dict, prefix), device=device) + else: + out = model_base.BaseModel(self, model_type=self.model_type(state_dict, prefix), device=device) + if self.inpaint_model(): + out.set_inpaint() + return out + + def process_clip_state_dict(self, state_dict): + return state_dict + + def process_unet_state_dict(self, state_dict): + return state_dict + + def process_vae_state_dict(self, state_dict): + return state_dict + + def process_clip_state_dict_for_saving(self, state_dict): + replace_prefix = {"": "cond_stage_model."} + return utils.state_dict_prefix_replace(state_dict, replace_prefix) + + def process_clip_vision_state_dict_for_saving(self, state_dict): + replace_prefix = {} + if self.clip_vision_prefix is not None: + replace_prefix[""] = self.clip_vision_prefix + return utils.state_dict_prefix_replace(state_dict, replace_prefix) + + def process_unet_state_dict_for_saving(self, state_dict): + replace_prefix = {"": "model.diffusion_model."} + return 
utils.state_dict_prefix_replace(state_dict, replace_prefix) + + def process_vae_state_dict_for_saving(self, state_dict): + replace_prefix = {"": "first_stage_model."} + return utils.state_dict_prefix_replace(state_dict, replace_prefix) + + def set_manual_cast(self, manual_cast_dtype): + self.manual_cast_dtype = manual_cast_dtype diff --git a/ldm_patched/modules/utils.py b/ldm_patched/modules/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..f8283a86e4682d56aae608a7ffa918c6980d8bac --- /dev/null +++ b/ldm_patched/modules/utils.py @@ -0,0 +1,461 @@ +import torch +import math +import struct +import ldm_patched.modules.checkpoint_pickle +import safetensors.torch +import numpy as np +from PIL import Image + +def load_torch_file(ckpt, safe_load=False, device=None): + if device is None: + device = torch.device("cpu") + if ckpt.lower().endswith(".safetensors"): + sd = safetensors.torch.load_file(ckpt, device=device.type) + else: + if safe_load: + if not 'weights_only' in torch.load.__code__.co_varnames: + print("Warning torch.load doesn't support weights_only on this pytorch version, loading unsafely.") + safe_load = False + if safe_load: + pl_sd = torch.load(ckpt, map_location=device, weights_only=True) + else: + pl_sd = torch.load(ckpt, map_location=device, pickle_module=ldm_patched.modules.checkpoint_pickle) + if "global_step" in pl_sd: + print(f"Global Step: {pl_sd['global_step']}") + if "state_dict" in pl_sd: + sd = pl_sd["state_dict"] + else: + sd = pl_sd + return sd + +def save_torch_file(sd, ckpt, metadata=None): + if metadata is not None: + safetensors.torch.save_file(sd, ckpt, metadata=metadata) + else: + safetensors.torch.save_file(sd, ckpt) + +def calculate_parameters(sd, prefix=""): + params = 0 + for k in sd.keys(): + if k.startswith(prefix): + params += sd[k].nelement() + return params + +def state_dict_key_replace(state_dict, keys_to_replace): + for x in keys_to_replace: + if x in state_dict: + state_dict[keys_to_replace[x]] = state_dict.pop(x) + return state_dict + +def state_dict_prefix_replace(state_dict, replace_prefix, filter_keys=False): + if filter_keys: + out = {} + else: + out = state_dict + for rp in replace_prefix: + replace = list(map(lambda a: (a, "{}{}".format(replace_prefix[rp], a[len(rp):])), filter(lambda a: a.startswith(rp), state_dict.keys()))) + for x in replace: + w = state_dict.pop(x[0]) + out[x[1]] = w + return out + + +def transformers_convert(sd, prefix_from, prefix_to, number): + keys_to_replace = { + "{}positional_embedding": "{}embeddings.position_embedding.weight", + "{}token_embedding.weight": "{}embeddings.token_embedding.weight", + "{}ln_final.weight": "{}final_layer_norm.weight", + "{}ln_final.bias": "{}final_layer_norm.bias", + } + + for k in keys_to_replace: + x = k.format(prefix_from) + if x in sd: + sd[keys_to_replace[k].format(prefix_to)] = sd.pop(x) + + resblock_to_replace = { + "ln_1": "layer_norm1", + "ln_2": "layer_norm2", + "mlp.c_fc": "mlp.fc1", + "mlp.c_proj": "mlp.fc2", + "attn.out_proj": "self_attn.out_proj", + } + + for resblock in range(number): + for x in resblock_to_replace: + for y in ["weight", "bias"]: + k = "{}transformer.resblocks.{}.{}.{}".format(prefix_from, resblock, x, y) + k_to = "{}encoder.layers.{}.{}.{}".format(prefix_to, resblock, resblock_to_replace[x], y) + if k in sd: + sd[k_to] = sd.pop(k) + + for y in ["weight", "bias"]: + k_from = "{}transformer.resblocks.{}.attn.in_proj_{}".format(prefix_from, resblock, y) + if k_from in sd: + weights = sd.pop(k_from) + shape_from = weights.shape[0] 
// 3 + for x in range(3): + p = ["self_attn.q_proj", "self_attn.k_proj", "self_attn.v_proj"] + k_to = "{}encoder.layers.{}.{}.{}".format(prefix_to, resblock, p[x], y) + sd[k_to] = weights[shape_from*x:shape_from*(x + 1)] + return sd + +UNET_MAP_ATTENTIONS = { + "proj_in.weight", + "proj_in.bias", + "proj_out.weight", + "proj_out.bias", + "norm.weight", + "norm.bias", +} + +TRANSFORMER_BLOCKS = { + "norm1.weight", + "norm1.bias", + "norm2.weight", + "norm2.bias", + "norm3.weight", + "norm3.bias", + "attn1.to_q.weight", + "attn1.to_k.weight", + "attn1.to_v.weight", + "attn1.to_out.0.weight", + "attn1.to_out.0.bias", + "attn2.to_q.weight", + "attn2.to_k.weight", + "attn2.to_v.weight", + "attn2.to_out.0.weight", + "attn2.to_out.0.bias", + "ff.net.0.proj.weight", + "ff.net.0.proj.bias", + "ff.net.2.weight", + "ff.net.2.bias", +} + +UNET_MAP_RESNET = { + "in_layers.2.weight": "conv1.weight", + "in_layers.2.bias": "conv1.bias", + "emb_layers.1.weight": "time_emb_proj.weight", + "emb_layers.1.bias": "time_emb_proj.bias", + "out_layers.3.weight": "conv2.weight", + "out_layers.3.bias": "conv2.bias", + "skip_connection.weight": "conv_shortcut.weight", + "skip_connection.bias": "conv_shortcut.bias", + "in_layers.0.weight": "norm1.weight", + "in_layers.0.bias": "norm1.bias", + "out_layers.0.weight": "norm2.weight", + "out_layers.0.bias": "norm2.bias", +} + +UNET_MAP_BASIC = { + ("label_emb.0.0.weight", "class_embedding.linear_1.weight"), + ("label_emb.0.0.bias", "class_embedding.linear_1.bias"), + ("label_emb.0.2.weight", "class_embedding.linear_2.weight"), + ("label_emb.0.2.bias", "class_embedding.linear_2.bias"), + ("label_emb.0.0.weight", "add_embedding.linear_1.weight"), + ("label_emb.0.0.bias", "add_embedding.linear_1.bias"), + ("label_emb.0.2.weight", "add_embedding.linear_2.weight"), + ("label_emb.0.2.bias", "add_embedding.linear_2.bias"), + ("input_blocks.0.0.weight", "conv_in.weight"), + ("input_blocks.0.0.bias", "conv_in.bias"), + ("out.0.weight", "conv_norm_out.weight"), + ("out.0.bias", "conv_norm_out.bias"), + ("out.2.weight", "conv_out.weight"), + ("out.2.bias", "conv_out.bias"), + ("time_embed.0.weight", "time_embedding.linear_1.weight"), + ("time_embed.0.bias", "time_embedding.linear_1.bias"), + ("time_embed.2.weight", "time_embedding.linear_2.weight"), + ("time_embed.2.bias", "time_embedding.linear_2.bias") +} + +def unet_to_diffusers(unet_config): + num_res_blocks = unet_config["num_res_blocks"] + channel_mult = unet_config["channel_mult"] + transformer_depth = unet_config["transformer_depth"][:] + transformer_depth_output = unet_config["transformer_depth_output"][:] + num_blocks = len(channel_mult) + + transformers_mid = unet_config.get("transformer_depth_middle", None) + + diffusers_unet_map = {} + for x in range(num_blocks): + n = 1 + (num_res_blocks[x] + 1) * x + for i in range(num_res_blocks[x]): + for b in UNET_MAP_RESNET: + diffusers_unet_map["down_blocks.{}.resnets.{}.{}".format(x, i, UNET_MAP_RESNET[b])] = "input_blocks.{}.0.{}".format(n, b) + num_transformers = transformer_depth.pop(0) + if num_transformers > 0: + for b in UNET_MAP_ATTENTIONS: + diffusers_unet_map["down_blocks.{}.attentions.{}.{}".format(x, i, b)] = "input_blocks.{}.1.{}".format(n, b) + for t in range(num_transformers): + for b in TRANSFORMER_BLOCKS: + diffusers_unet_map["down_blocks.{}.attentions.{}.transformer_blocks.{}.{}".format(x, i, t, b)] = "input_blocks.{}.1.transformer_blocks.{}.{}".format(n, t, b) + n += 1 + for k in ["weight", "bias"]: + 
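+                # the stage's trailing downsample op (input_blocks.{n}.0.op) maps onto diffusers' down_blocks.{x}.downsamplers.0.conv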
diffusers_unet_map["down_blocks.{}.downsamplers.0.conv.{}".format(x, k)] = "input_blocks.{}.0.op.{}".format(n, k) + + i = 0 + for b in UNET_MAP_ATTENTIONS: + diffusers_unet_map["mid_block.attentions.{}.{}".format(i, b)] = "middle_block.1.{}".format(b) + for t in range(transformers_mid): + for b in TRANSFORMER_BLOCKS: + diffusers_unet_map["mid_block.attentions.{}.transformer_blocks.{}.{}".format(i, t, b)] = "middle_block.1.transformer_blocks.{}.{}".format(t, b) + + for i, n in enumerate([0, 2]): + for b in UNET_MAP_RESNET: + diffusers_unet_map["mid_block.resnets.{}.{}".format(i, UNET_MAP_RESNET[b])] = "middle_block.{}.{}".format(n, b) + + num_res_blocks = list(reversed(num_res_blocks)) + for x in range(num_blocks): + n = (num_res_blocks[x] + 1) * x + l = num_res_blocks[x] + 1 + for i in range(l): + c = 0 + for b in UNET_MAP_RESNET: + diffusers_unet_map["up_blocks.{}.resnets.{}.{}".format(x, i, UNET_MAP_RESNET[b])] = "output_blocks.{}.0.{}".format(n, b) + c += 1 + num_transformers = transformer_depth_output.pop() + if num_transformers > 0: + c += 1 + for b in UNET_MAP_ATTENTIONS: + diffusers_unet_map["up_blocks.{}.attentions.{}.{}".format(x, i, b)] = "output_blocks.{}.1.{}".format(n, b) + for t in range(num_transformers): + for b in TRANSFORMER_BLOCKS: + diffusers_unet_map["up_blocks.{}.attentions.{}.transformer_blocks.{}.{}".format(x, i, t, b)] = "output_blocks.{}.1.transformer_blocks.{}.{}".format(n, t, b) + if i == l - 1: + for k in ["weight", "bias"]: + diffusers_unet_map["up_blocks.{}.upsamplers.0.conv.{}".format(x, k)] = "output_blocks.{}.{}.conv.{}".format(n, c, k) + n += 1 + + for k in UNET_MAP_BASIC: + diffusers_unet_map[k[1]] = k[0] + + return diffusers_unet_map + +def repeat_to_batch_size(tensor, batch_size): + if tensor.shape[0] > batch_size: + return tensor[:batch_size] + elif tensor.shape[0] < batch_size: + return tensor.repeat([math.ceil(batch_size / tensor.shape[0])] + [1] * (len(tensor.shape) - 1))[:batch_size] + return tensor + +def resize_to_batch_size(tensor, batch_size): + in_batch_size = tensor.shape[0] + if in_batch_size == batch_size: + return tensor + + if batch_size <= 1: + return tensor[:batch_size] + + output = torch.empty([batch_size] + list(tensor.shape)[1:], dtype=tensor.dtype, device=tensor.device) + if batch_size < in_batch_size: + scale = (in_batch_size - 1) / (batch_size - 1) + for i in range(batch_size): + output[i] = tensor[min(round(i * scale), in_batch_size - 1)] + else: + scale = in_batch_size / batch_size + for i in range(batch_size): + output[i] = tensor[min(math.floor((i + 0.5) * scale), in_batch_size - 1)] + + return output + +def convert_sd_to(state_dict, dtype): + keys = list(state_dict.keys()) + for k in keys: + state_dict[k] = state_dict[k].to(dtype) + return state_dict + +def safetensors_header(safetensors_path, max_size=100*1024*1024): + with open(safetensors_path, "rb") as f: + header = f.read(8) + length_of_header = struct.unpack(' max_size: + return None + return f.read(length_of_header) + +def set_attr(obj, attr, value): + attrs = attr.split(".") + for name in attrs[:-1]: + obj = getattr(obj, name) + prev = getattr(obj, attrs[-1]) + setattr(obj, attrs[-1], torch.nn.Parameter(value, requires_grad=False)) + del prev + +def copy_to_param(obj, attr, value): + # inplace update tensor instead of replacing it + attrs = attr.split(".") + for name in attrs[:-1]: + obj = getattr(obj, name) + prev = getattr(obj, attrs[-1]) + prev.data.copy_(value) + +def get_attr(obj, attr): + attrs = attr.split(".") + for name in attrs: + obj = getattr(obj, name) + 
return obj + +def bislerp(samples, width, height): + def slerp(b1, b2, r): + '''slerps batches b1, b2 according to ratio r, batches should be flat e.g. NxC''' + + c = b1.shape[-1] + + #norms + b1_norms = torch.norm(b1, dim=-1, keepdim=True) + b2_norms = torch.norm(b2, dim=-1, keepdim=True) + + #normalize + b1_normalized = b1 / b1_norms + b2_normalized = b2 / b2_norms + + #zero when norms are zero + b1_normalized[b1_norms.expand(-1,c) == 0.0] = 0.0 + b2_normalized[b2_norms.expand(-1,c) == 0.0] = 0.0 + + #slerp + dot = (b1_normalized*b2_normalized).sum(1) + omega = torch.acos(dot) + so = torch.sin(omega) + + #technically not mathematically correct, but more pleasing? + res = (torch.sin((1.0-r.squeeze(1))*omega)/so).unsqueeze(1)*b1_normalized + (torch.sin(r.squeeze(1)*omega)/so).unsqueeze(1) * b2_normalized + res *= (b1_norms * (1.0-r) + b2_norms * r).expand(-1,c) + + #edge cases for same or polar opposites + res[dot > 1 - 1e-5] = b1[dot > 1 - 1e-5] + res[dot < 1e-5 - 1] = (b1 * (1.0-r) + b2 * r)[dot < 1e-5 - 1] + return res + + def generate_bilinear_data(length_old, length_new, device): + coords_1 = torch.arange(length_old, dtype=torch.float32, device=device).reshape((1,1,1,-1)) + coords_1 = torch.nn.functional.interpolate(coords_1, size=(1, length_new), mode="bilinear") + ratios = coords_1 - coords_1.floor() + coords_1 = coords_1.to(torch.int64) + + coords_2 = torch.arange(length_old, dtype=torch.float32, device=device).reshape((1,1,1,-1)) + 1 + coords_2[:,:,:,-1] -= 1 + coords_2 = torch.nn.functional.interpolate(coords_2, size=(1, length_new), mode="bilinear") + coords_2 = coords_2.to(torch.int64) + return ratios, coords_1, coords_2 + + orig_dtype = samples.dtype + samples = samples.float() + n,c,h,w = samples.shape + h_new, w_new = (height, width) + + #linear w + ratios, coords_1, coords_2 = generate_bilinear_data(w, w_new, samples.device) + coords_1 = coords_1.expand((n, c, h, -1)) + coords_2 = coords_2.expand((n, c, h, -1)) + ratios = ratios.expand((n, 1, h, -1)) + + pass_1 = samples.gather(-1,coords_1).movedim(1, -1).reshape((-1,c)) + pass_2 = samples.gather(-1,coords_2).movedim(1, -1).reshape((-1,c)) + ratios = ratios.movedim(1, -1).reshape((-1,1)) + + result = slerp(pass_1, pass_2, ratios) + result = result.reshape(n, h, w_new, c).movedim(-1, 1) + + #linear h + ratios, coords_1, coords_2 = generate_bilinear_data(h, h_new, samples.device) + coords_1 = coords_1.reshape((1,1,-1,1)).expand((n, c, -1, w_new)) + coords_2 = coords_2.reshape((1,1,-1,1)).expand((n, c, -1, w_new)) + ratios = ratios.reshape((1,1,-1,1)).expand((n, 1, -1, w_new)) + + pass_1 = result.gather(-2,coords_1).movedim(1, -1).reshape((-1,c)) + pass_2 = result.gather(-2,coords_2).movedim(1, -1).reshape((-1,c)) + ratios = ratios.movedim(1, -1).reshape((-1,1)) + + result = slerp(pass_1, pass_2, ratios) + result = result.reshape(n, h_new, w_new, c).movedim(-1, 1) + return result.to(orig_dtype) + +def lanczos(samples, width, height): + images = [Image.fromarray(np.clip(255. 
* image.movedim(0, -1).cpu().numpy(), 0, 255).astype(np.uint8)) for image in samples] + images = [image.resize((width, height), resample=Image.Resampling.LANCZOS) for image in images] + images = [torch.from_numpy(np.array(image).astype(np.float32) / 255.0).movedim(-1, 0) for image in images] + result = torch.stack(images) + return result.to(samples.device, samples.dtype) + +def common_upscale(samples, width, height, upscale_method, crop): + if crop == "center": + old_width = samples.shape[3] + old_height = samples.shape[2] + old_aspect = old_width / old_height + new_aspect = width / height + x = 0 + y = 0 + if old_aspect > new_aspect: + x = round((old_width - old_width * (new_aspect / old_aspect)) / 2) + elif old_aspect < new_aspect: + y = round((old_height - old_height * (old_aspect / new_aspect)) / 2) + s = samples[:,:,y:old_height-y,x:old_width-x] + else: + s = samples + + if upscale_method == "bislerp": + return bislerp(s, width, height) + elif upscale_method == "lanczos": + return lanczos(s, width, height) + else: + return torch.nn.functional.interpolate(s, size=(height, width), mode=upscale_method) + +def get_tiled_scale_steps(width, height, tile_x, tile_y, overlap): + return math.ceil((height / (tile_y - overlap))) * math.ceil((width / (tile_x - overlap))) + +@torch.inference_mode() +def tiled_scale(samples, function, tile_x=64, tile_y=64, overlap = 8, upscale_amount = 4, out_channels = 3, output_device="cpu", pbar = None): + output = torch.empty((samples.shape[0], out_channels, round(samples.shape[2] * upscale_amount), round(samples.shape[3] * upscale_amount)), device=output_device) + for b in range(samples.shape[0]): + s = samples[b:b+1] + out = torch.zeros((s.shape[0], out_channels, round(s.shape[2] * upscale_amount), round(s.shape[3] * upscale_amount)), device=output_device) + out_div = torch.zeros((s.shape[0], out_channels, round(s.shape[2] * upscale_amount), round(s.shape[3] * upscale_amount)), device=output_device) + for y in range(0, s.shape[2], tile_y - overlap): + for x in range(0, s.shape[3], tile_x - overlap): + s_in = s[:,:,y:y+tile_y,x:x+tile_x] + + ps = function(s_in).to(output_device) + mask = torch.ones_like(ps) + feather = round(overlap * upscale_amount) + for t in range(feather): + mask[:,:,t:1+t,:] *= ((1.0/feather) * (t + 1)) + mask[:,:,mask.shape[2] -1 -t: mask.shape[2]-t,:] *= ((1.0/feather) * (t + 1)) + mask[:,:,:,t:1+t] *= ((1.0/feather) * (t + 1)) + mask[:,:,:,mask.shape[3]- 1 - t: mask.shape[3]- t] *= ((1.0/feather) * (t + 1)) + out[:,:,round(y*upscale_amount):round((y+tile_y)*upscale_amount),round(x*upscale_amount):round((x+tile_x)*upscale_amount)] += ps * mask + out_div[:,:,round(y*upscale_amount):round((y+tile_y)*upscale_amount),round(x*upscale_amount):round((x+tile_x)*upscale_amount)] += mask + if pbar is not None: + pbar.update(1) + + output[b:b+1] = out/out_div + return output + +PROGRESS_BAR_ENABLED = True +def set_progress_bar_enabled(enabled): + global PROGRESS_BAR_ENABLED + PROGRESS_BAR_ENABLED = enabled + +PROGRESS_BAR_HOOK = None +def set_progress_bar_global_hook(function): + global PROGRESS_BAR_HOOK + PROGRESS_BAR_HOOK = function + +class ProgressBar: + def __init__(self, total): + global PROGRESS_BAR_HOOK + self.total = total + self.current = 0 + self.hook = PROGRESS_BAR_HOOK + + def update_absolute(self, value, total=None, preview=None): + if total is not None: + self.total = total + if value > self.total: + value = self.total + self.current = value + if self.hook is not None: + self.hook(self.current, self.total, preview) + + def 
update(self, value):
+        self.update_absolute(self.current + value)
diff --git a/ldm_patched/pfn/__init__.py b/ldm_patched/pfn/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ldm_patched/pfn/architecture/DAT.py b/ldm_patched/pfn/architecture/DAT.py
new file mode 100644
index 0000000000000000000000000000000000000000..0bcc26ef422b73cef41744e2203901a3d290c2f0
--- /dev/null
+++ b/ldm_patched/pfn/architecture/DAT.py
@@ -0,0 +1,1182 @@
+# pylint: skip-file
+import math
+import re
+
+import numpy as np
+import torch
+import torch.nn as nn
+import torch.utils.checkpoint as checkpoint
+from einops import rearrange
+from einops.layers.torch import Rearrange
+from torch import Tensor
+from torch.nn import functional as F
+
+from .timm.drop import DropPath
+from .timm.weight_init import trunc_normal_
+
+
+def img2windows(img, H_sp, W_sp):
+    """
+    Input: Image (B, C, H, W)
+    Output: Window Partition (B', N, C)
+    """
+    B, C, H, W = img.shape
+    img_reshape = img.view(B, C, H // H_sp, H_sp, W // W_sp, W_sp)
+    img_perm = (
+        img_reshape.permute(0, 2, 4, 3, 5, 1).contiguous().reshape(-1, H_sp * W_sp, C)
+    )
+    return img_perm
+
+
+def windows2img(img_splits_hw, H_sp, W_sp, H, W):
+    """
+    Input: Window Partition (B', N, C)
+    Output: Image (B, H, W, C)
+    """
+    B = int(img_splits_hw.shape[0] / (H * W / H_sp / W_sp))
+
+    img = img_splits_hw.view(B, H // H_sp, W // W_sp, H_sp, W_sp, -1)
+    img = img.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)
+    return img
+
+
+class SpatialGate(nn.Module):
+    """Spatial-Gate.
+    Args:
+        dim (int): Half of input channels.
+    """
+
+    def __init__(self, dim):
+        super().__init__()
+        self.norm = nn.LayerNorm(dim)
+        self.conv = nn.Conv2d(
+            dim, dim, kernel_size=3, stride=1, padding=1, groups=dim
+        )  # DW Conv
+
+    def forward(self, x, H, W):
+        # Split
+        x1, x2 = x.chunk(2, dim=-1)
+        B, N, C = x.shape
+        x2 = (
+            self.conv(self.norm(x2).transpose(1, 2).contiguous().view(B, C // 2, H, W))
+            .flatten(2)
+            .transpose(-1, -2)
+            .contiguous()
+        )
+
+        return x1 * x2
+
+
+class SGFN(nn.Module):
+    """Spatial-Gate Feed-Forward Network.
+    Args:
+        in_features (int): Number of input channels.
+        hidden_features (int | None): Number of hidden channels. Default: None
+        out_features (int | None): Number of output channels. Default: None
+        act_layer (nn.Module): Activation layer. Default: nn.GELU
+        drop (float): Dropout rate.
Default: 0.0
+    """
+
+    def __init__(
+        self,
+        in_features,
+        hidden_features=None,
+        out_features=None,
+        act_layer=nn.GELU,
+        drop=0.0,
+    ):
+        super().__init__()
+        out_features = out_features or in_features
+        hidden_features = hidden_features or in_features
+        self.fc1 = nn.Linear(in_features, hidden_features)
+        self.act = act_layer()
+        self.sg = SpatialGate(hidden_features // 2)
+        self.fc2 = nn.Linear(hidden_features // 2, out_features)
+        self.drop = nn.Dropout(drop)
+
+    def forward(self, x, H, W):
+        """
+        Input: x: (B, H*W, C), H, W
+        Output: x: (B, H*W, C)
+        """
+        x = self.fc1(x)
+        x = self.act(x)
+        x = self.drop(x)
+
+        x = self.sg(x, H, W)
+        x = self.drop(x)
+
+        x = self.fc2(x)
+        x = self.drop(x)
+        return x
+
+
+class DynamicPosBias(nn.Module):
+    # The implementation builds on Crossformer code https://github.com/cheerss/CrossFormer/blob/main/models/crossformer.py
+    """Dynamic Relative Position Bias.
+    Args:
+        dim (int): Number of input channels.
+        num_heads (int): Number of attention heads.
+        residual (bool): If True, use a residual strategy to connect the convs.
+    """
+
+    def __init__(self, dim, num_heads, residual):
+        super().__init__()
+        self.residual = residual
+        self.num_heads = num_heads
+        self.pos_dim = dim // 4
+        self.pos_proj = nn.Linear(2, self.pos_dim)
+        self.pos1 = nn.Sequential(
+            nn.LayerNorm(self.pos_dim),
+            nn.ReLU(inplace=True),
+            nn.Linear(self.pos_dim, self.pos_dim),
+        )
+        self.pos2 = nn.Sequential(
+            nn.LayerNorm(self.pos_dim),
+            nn.ReLU(inplace=True),
+            nn.Linear(self.pos_dim, self.pos_dim),
+        )
+        self.pos3 = nn.Sequential(
+            nn.LayerNorm(self.pos_dim),
+            nn.ReLU(inplace=True),
+            nn.Linear(self.pos_dim, self.num_heads),
+        )
+
+    def forward(self, biases):
+        if self.residual:
+            pos = self.pos_proj(biases)  # 2Gh-1 * 2Gw-1, heads
+            pos = pos + self.pos1(pos)
+            pos = pos + self.pos2(pos)
+            pos = self.pos3(pos)
+        else:
+            pos = self.pos3(self.pos2(self.pos1(self.pos_proj(biases))))
+        return pos
+
+
+class Spatial_Attention(nn.Module):
+    """Spatial Window Self-Attention.
+    It supports rectangular windows (including square windows).
+    Args:
+        dim (int): Number of input channels.
+        idx (int): The index of the window. (0/1)
+        split_size (tuple(int)): Height and Width of spatial window.
+        dim_out (int | None): The dimension of the attention output. Default: None
+        num_heads (int): Number of attention heads. Default: 6
+        attn_drop (float): Dropout ratio of attention weight. Default: 0.0
+        proj_drop (float): Dropout ratio of output. Default: 0.0
+        qk_scale (float | None): Override default qk scale of head_dim ** -0.5 if set
+        position_bias (bool): The dynamic relative position bias.
Default: True + """ + + def __init__( + self, + dim, + idx, + split_size=[8, 8], + dim_out=None, + num_heads=6, + attn_drop=0.0, + proj_drop=0.0, + qk_scale=None, + position_bias=True, + ): + super().__init__() + self.dim = dim + self.dim_out = dim_out or dim + self.split_size = split_size + self.num_heads = num_heads + self.idx = idx + self.position_bias = position_bias + + head_dim = dim // num_heads + self.scale = qk_scale or head_dim**-0.5 + + if idx == 0: + H_sp, W_sp = self.split_size[0], self.split_size[1] + elif idx == 1: + W_sp, H_sp = self.split_size[0], self.split_size[1] + else: + print("ERROR MODE", idx) + exit(0) + self.H_sp = H_sp + self.W_sp = W_sp + + if self.position_bias: + self.pos = DynamicPosBias(self.dim // 4, self.num_heads, residual=False) + # generate mother-set + position_bias_h = torch.arange(1 - self.H_sp, self.H_sp) + position_bias_w = torch.arange(1 - self.W_sp, self.W_sp) + biases = torch.stack(torch.meshgrid([position_bias_h, position_bias_w])) + biases = biases.flatten(1).transpose(0, 1).contiguous().float() + self.register_buffer("rpe_biases", biases) + + # get pair-wise relative position index for each token inside the window + coords_h = torch.arange(self.H_sp) + coords_w = torch.arange(self.W_sp) + coords = torch.stack(torch.meshgrid([coords_h, coords_w])) + coords_flatten = torch.flatten(coords, 1) + relative_coords = coords_flatten[:, :, None] - coords_flatten[:, None, :] + relative_coords = relative_coords.permute(1, 2, 0).contiguous() + relative_coords[:, :, 0] += self.H_sp - 1 + relative_coords[:, :, 1] += self.W_sp - 1 + relative_coords[:, :, 0] *= 2 * self.W_sp - 1 + relative_position_index = relative_coords.sum(-1) + self.register_buffer("relative_position_index", relative_position_index) + + self.attn_drop = nn.Dropout(attn_drop) + + def im2win(self, x, H, W): + B, N, C = x.shape + x = x.transpose(-2, -1).contiguous().view(B, C, H, W) + x = img2windows(x, self.H_sp, self.W_sp) + x = ( + x.reshape(-1, self.H_sp * self.W_sp, self.num_heads, C // self.num_heads) + .permute(0, 2, 1, 3) + .contiguous() + ) + return x + + def forward(self, qkv, H, W, mask=None): + """ + Input: qkv: (B, 3*L, C), H, W, mask: (B, N, N), N is the window size + Output: x (B, H, W, C) + """ + q, k, v = qkv[0], qkv[1], qkv[2] + + B, L, C = q.shape + assert L == H * W, "flatten img_tokens has wrong size" + + # partition the q,k,v, image to window + q = self.im2win(q, H, W) + k = self.im2win(k, H, W) + v = self.im2win(v, H, W) + + q = q * self.scale + attn = q @ k.transpose(-2, -1) # B head N C @ B head C N --> B head N N + + # calculate drpe + if self.position_bias: + pos = self.pos(self.rpe_biases) + # select position bias + relative_position_bias = pos[self.relative_position_index.view(-1)].view( + self.H_sp * self.W_sp, self.H_sp * self.W_sp, -1 + ) + relative_position_bias = relative_position_bias.permute( + 2, 0, 1 + ).contiguous() + attn = attn + relative_position_bias.unsqueeze(0) + + N = attn.shape[3] + + # use mask for shift window + if mask is not None: + nW = mask.shape[0] + attn = attn.view(B, nW, self.num_heads, N, N) + mask.unsqueeze(1).unsqueeze( + 0 + ) + attn = attn.view(-1, self.num_heads, N, N) + + attn = nn.functional.softmax(attn, dim=-1, dtype=attn.dtype) + attn = self.attn_drop(attn) + + x = attn @ v + x = x.transpose(1, 2).reshape( + -1, self.H_sp * self.W_sp, C + ) # B head N N @ B head N C + + # merge the window, window to image + x = windows2img(x, self.H_sp, self.W_sp, H, W) # B H' W' C + + return x + + +class 
Adaptive_Spatial_Attention(nn.Module):
+    # The implementation builds on CAT code https://github.com/Zhengchen1999/CAT
+    """Adaptive Spatial Self-Attention
+    Args:
+        dim (int): Number of input channels.
+        num_heads (int): Number of attention heads. Default: 6
+        split_size (tuple(int)): Height and Width of spatial window.
+        shift_size (tuple(int)): Shift size for spatial window.
+        qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: False
+        qk_scale (float | None): Override default qk scale of head_dim ** -0.5 if set.
+        drop (float): Dropout rate. Default: 0.0
+        attn_drop (float): Attention dropout rate. Default: 0.0
+        rg_idx (int): The index of the Residual Group (RG)
+        b_idx (int): The index of the Block in each RG
+    """
+
+    def __init__(
+        self,
+        dim,
+        num_heads,
+        reso=64,
+        split_size=[8, 8],
+        shift_size=[1, 2],
+        qkv_bias=False,
+        qk_scale=None,
+        drop=0.0,
+        attn_drop=0.0,
+        rg_idx=0,
+        b_idx=0,
+    ):
+        super().__init__()
+        self.dim = dim
+        self.num_heads = num_heads
+        self.split_size = split_size
+        self.shift_size = shift_size
+        self.b_idx = b_idx
+        self.rg_idx = rg_idx
+        self.patches_resolution = reso
+        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
+
+        assert (
+            0 <= self.shift_size[0] < self.split_size[0]
+        ), "shift_size must be in [0, split_size[0])"
+        assert (
+            0 <= self.shift_size[1] < self.split_size[1]
+        ), "shift_size must be in [0, split_size[1])"
+
+        self.branch_num = 2
+
+        self.proj = nn.Linear(dim, dim)
+        self.proj_drop = nn.Dropout(drop)
+
+        self.attns = nn.ModuleList(
+            [
+                Spatial_Attention(
+                    dim // 2,
+                    idx=i,
+                    split_size=split_size,
+                    num_heads=num_heads // 2,
+                    dim_out=dim // 2,
+                    qk_scale=qk_scale,
+                    attn_drop=attn_drop,
+                    proj_drop=drop,
+                    position_bias=True,
+                )
+                for i in range(self.branch_num)
+            ]
+        )
+
+        if (self.rg_idx % 2 == 0 and self.b_idx > 0 and (self.b_idx - 2) % 4 == 0) or (
+            self.rg_idx % 2 != 0 and self.b_idx % 4 == 0
+        ):
+            attn_mask = self.calculate_mask(
+                self.patches_resolution, self.patches_resolution
+            )
+            self.register_buffer("attn_mask_0", attn_mask[0])
+            self.register_buffer("attn_mask_1", attn_mask[1])
+        else:
+            attn_mask = None
+            self.register_buffer("attn_mask_0", None)
+            self.register_buffer("attn_mask_1", None)
+
+        self.dwconv = nn.Sequential(
+            nn.Conv2d(dim, dim, kernel_size=3, stride=1, padding=1, groups=dim),
+            nn.BatchNorm2d(dim),
+            nn.GELU(),
+        )
+        self.channel_interaction = nn.Sequential(
+            nn.AdaptiveAvgPool2d(1),
+            nn.Conv2d(dim, dim // 8, kernel_size=1),
+            nn.BatchNorm2d(dim // 8),
+            nn.GELU(),
+            nn.Conv2d(dim // 8, dim, kernel_size=1),
+        )
+        self.spatial_interaction = nn.Sequential(
+            nn.Conv2d(dim, dim // 16, kernel_size=1),
+            nn.BatchNorm2d(dim // 16),
+            nn.GELU(),
+            nn.Conv2d(dim // 16, 1, kernel_size=1),
+        )
+
+    def calculate_mask(self, H, W):
+        # The implementation builds on Swin Transformer code https://github.com/microsoft/Swin-Transformer/blob/main/models/swin_transformer.py
+        # calculate attention mask for shift window
+        img_mask_0 = torch.zeros((1, H, W, 1))  # 1 H W 1 idx=0
+        img_mask_1 = torch.zeros((1, H, W, 1))  # 1 H W 1 idx=1
+        h_slices_0 = (
+            slice(0, -self.split_size[0]),
+            slice(-self.split_size[0], -self.shift_size[0]),
+            slice(-self.shift_size[0], None),
+        )
+        w_slices_0 = (
+            slice(0, -self.split_size[1]),
+            slice(-self.split_size[1], -self.shift_size[1]),
+            slice(-self.shift_size[1], None),
+        )
+
+        h_slices_1 = (
+            slice(0, -self.split_size[1]),
+            slice(-self.split_size[1], -self.shift_size[1]),
+            slice(-self.shift_size[1], None),
+        )
+        w_slices_1 = (
+            slice(0, -self.split_size[0]),
+
slice(-self.split_size[0], -self.shift_size[0]), + slice(-self.shift_size[0], None), + ) + cnt = 0 + for h in h_slices_0: + for w in w_slices_0: + img_mask_0[:, h, w, :] = cnt + cnt += 1 + cnt = 0 + for h in h_slices_1: + for w in w_slices_1: + img_mask_1[:, h, w, :] = cnt + cnt += 1 + + # calculate mask for window-0 + img_mask_0 = img_mask_0.view( + 1, + H // self.split_size[0], + self.split_size[0], + W // self.split_size[1], + self.split_size[1], + 1, + ) + img_mask_0 = ( + img_mask_0.permute(0, 1, 3, 2, 4, 5) + .contiguous() + .view(-1, self.split_size[0], self.split_size[1], 1) + ) # nW, sw[0], sw[1], 1 + mask_windows_0 = img_mask_0.view(-1, self.split_size[0] * self.split_size[1]) + attn_mask_0 = mask_windows_0.unsqueeze(1) - mask_windows_0.unsqueeze(2) + attn_mask_0 = attn_mask_0.masked_fill( + attn_mask_0 != 0, float(-100.0) + ).masked_fill(attn_mask_0 == 0, float(0.0)) + + # calculate mask for window-1 + img_mask_1 = img_mask_1.view( + 1, + H // self.split_size[1], + self.split_size[1], + W // self.split_size[0], + self.split_size[0], + 1, + ) + img_mask_1 = ( + img_mask_1.permute(0, 1, 3, 2, 4, 5) + .contiguous() + .view(-1, self.split_size[1], self.split_size[0], 1) + ) # nW, sw[1], sw[0], 1 + mask_windows_1 = img_mask_1.view(-1, self.split_size[1] * self.split_size[0]) + attn_mask_1 = mask_windows_1.unsqueeze(1) - mask_windows_1.unsqueeze(2) + attn_mask_1 = attn_mask_1.masked_fill( + attn_mask_1 != 0, float(-100.0) + ).masked_fill(attn_mask_1 == 0, float(0.0)) + + return attn_mask_0, attn_mask_1 + + def forward(self, x, H, W): + """ + Input: x: (B, H*W, C), H, W + Output: x: (B, H*W, C) + """ + B, L, C = x.shape + assert L == H * W, "flatten img_tokens has wrong size" + + qkv = self.qkv(x).reshape(B, -1, 3, C).permute(2, 0, 1, 3) # 3, B, HW, C + # V without partition + v = qkv[2].transpose(-2, -1).contiguous().view(B, C, H, W) + + # image padding + max_split_size = max(self.split_size[0], self.split_size[1]) + pad_l = pad_t = 0 + pad_r = (max_split_size - W % max_split_size) % max_split_size + pad_b = (max_split_size - H % max_split_size) % max_split_size + + qkv = qkv.reshape(3 * B, H, W, C).permute(0, 3, 1, 2) # 3B C H W + qkv = ( + F.pad(qkv, (pad_l, pad_r, pad_t, pad_b)) + .reshape(3, B, C, -1) + .transpose(-2, -1) + ) # l r t b + _H = pad_b + H + _W = pad_r + W + _L = _H * _W + + # window-0 and window-1 on split channels [C/2, C/2]; for square windows (e.g., 8x8), window-0 and window-1 can be merged + # shift in block: (0, 4, 8, ...), (2, 6, 10, ...), (0, 4, 8, ...), (2, 6, 10, ...), ... 
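+        # shifted blocks roll the two channel halves with opposite axis order and reuse the precomputed masks; if padding changed the spatial size, the mask is recomputed on the fly below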
+ if (self.rg_idx % 2 == 0 and self.b_idx > 0 and (self.b_idx - 2) % 4 == 0) or ( + self.rg_idx % 2 != 0 and self.b_idx % 4 == 0 + ): + qkv = qkv.view(3, B, _H, _W, C) + qkv_0 = torch.roll( + qkv[:, :, :, :, : C // 2], + shifts=(-self.shift_size[0], -self.shift_size[1]), + dims=(2, 3), + ) + qkv_0 = qkv_0.view(3, B, _L, C // 2) + qkv_1 = torch.roll( + qkv[:, :, :, :, C // 2 :], + shifts=(-self.shift_size[1], -self.shift_size[0]), + dims=(2, 3), + ) + qkv_1 = qkv_1.view(3, B, _L, C // 2) + + if self.patches_resolution != _H or self.patches_resolution != _W: + mask_tmp = self.calculate_mask(_H, _W) + x1_shift = self.attns[0](qkv_0, _H, _W, mask=mask_tmp[0].to(x.device)) + x2_shift = self.attns[1](qkv_1, _H, _W, mask=mask_tmp[1].to(x.device)) + else: + x1_shift = self.attns[0](qkv_0, _H, _W, mask=self.attn_mask_0) + x2_shift = self.attns[1](qkv_1, _H, _W, mask=self.attn_mask_1) + + x1 = torch.roll( + x1_shift, shifts=(self.shift_size[0], self.shift_size[1]), dims=(1, 2) + ) + x2 = torch.roll( + x2_shift, shifts=(self.shift_size[1], self.shift_size[0]), dims=(1, 2) + ) + x1 = x1[:, :H, :W, :].reshape(B, L, C // 2) + x2 = x2[:, :H, :W, :].reshape(B, L, C // 2) + # attention output + attened_x = torch.cat([x1, x2], dim=2) + + else: + x1 = self.attns[0](qkv[:, :, :, : C // 2], _H, _W)[:, :H, :W, :].reshape( + B, L, C // 2 + ) + x2 = self.attns[1](qkv[:, :, :, C // 2 :], _H, _W)[:, :H, :W, :].reshape( + B, L, C // 2 + ) + # attention output + attened_x = torch.cat([x1, x2], dim=2) + + # convolution output + conv_x = self.dwconv(v) + + # Adaptive Interaction Module (AIM) + # C-Map (before sigmoid) + channel_map = ( + self.channel_interaction(conv_x) + .permute(0, 2, 3, 1) + .contiguous() + .view(B, 1, C) + ) + # S-Map (before sigmoid) + attention_reshape = attened_x.transpose(-2, -1).contiguous().view(B, C, H, W) + spatial_map = self.spatial_interaction(attention_reshape) + + # C-I + attened_x = attened_x * torch.sigmoid(channel_map) + # S-I + conv_x = torch.sigmoid(spatial_map) * conv_x + conv_x = conv_x.permute(0, 2, 3, 1).contiguous().view(B, L, C) + + x = attened_x + conv_x + + x = self.proj(x) + x = self.proj_drop(x) + + return x + + +class Adaptive_Channel_Attention(nn.Module): + # The implementation builds on XCiT code https://github.com/facebookresearch/xcit + """Adaptive Channel Self-Attention + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads. Default: 6 + qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None): Override default qk scale of head_dim ** -0.5 if set. + attn_drop (float): Attention dropout rate. Default: 0.0 + drop_path (float): Stochastic depth rate. 
Default: 0.0 + """ + + def __init__( + self, + dim, + num_heads=8, + qkv_bias=False, + qk_scale=None, + attn_drop=0.0, + proj_drop=0.0, + ): + super().__init__() + self.num_heads = num_heads + self.temperature = nn.Parameter(torch.ones(num_heads, 1, 1)) + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + self.dwconv = nn.Sequential( + nn.Conv2d(dim, dim, kernel_size=3, stride=1, padding=1, groups=dim), + nn.BatchNorm2d(dim), + nn.GELU(), + ) + self.channel_interaction = nn.Sequential( + nn.AdaptiveAvgPool2d(1), + nn.Conv2d(dim, dim // 8, kernel_size=1), + nn.BatchNorm2d(dim // 8), + nn.GELU(), + nn.Conv2d(dim // 8, dim, kernel_size=1), + ) + self.spatial_interaction = nn.Sequential( + nn.Conv2d(dim, dim // 16, kernel_size=1), + nn.BatchNorm2d(dim // 16), + nn.GELU(), + nn.Conv2d(dim // 16, 1, kernel_size=1), + ) + + def forward(self, x, H, W): + """ + Input: x: (B, H*W, C), H, W + Output: x: (B, H*W, C) + """ + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads) + qkv = qkv.permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[2] + + q = q.transpose(-2, -1) + k = k.transpose(-2, -1) + v = v.transpose(-2, -1) + + v_ = v.reshape(B, C, N).contiguous().view(B, C, H, W) + + q = torch.nn.functional.normalize(q, dim=-1) + k = torch.nn.functional.normalize(k, dim=-1) + + attn = (q @ k.transpose(-2, -1)) * self.temperature + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + # attention output + attened_x = (attn @ v).permute(0, 3, 1, 2).reshape(B, N, C) + + # convolution output + conv_x = self.dwconv(v_) + + # Adaptive Interaction Module (AIM) + # C-Map (before sigmoid) + attention_reshape = attened_x.transpose(-2, -1).contiguous().view(B, C, H, W) + channel_map = self.channel_interaction(attention_reshape) + # S-Map (before sigmoid) + spatial_map = ( + self.spatial_interaction(conv_x) + .permute(0, 2, 3, 1) + .contiguous() + .view(B, N, 1) + ) + + # S-I + attened_x = attened_x * torch.sigmoid(spatial_map) + # C-I + conv_x = conv_x * torch.sigmoid(channel_map) + conv_x = conv_x.permute(0, 2, 3, 1).contiguous().view(B, N, C) + + x = attened_x + conv_x + + x = self.proj(x) + x = self.proj_drop(x) + + return x + + +class DATB(nn.Module): + def __init__( + self, + dim, + num_heads, + reso=64, + split_size=[2, 4], + shift_size=[1, 2], + expansion_factor=4.0, + qkv_bias=False, + qk_scale=None, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + act_layer=nn.GELU, + norm_layer=nn.LayerNorm, + rg_idx=0, + b_idx=0, + ): + super().__init__() + + self.norm1 = norm_layer(dim) + + if b_idx % 2 == 0: + # DSTB + self.attn = Adaptive_Spatial_Attention( + dim, + num_heads=num_heads, + reso=reso, + split_size=split_size, + shift_size=shift_size, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop, + attn_drop=attn_drop, + rg_idx=rg_idx, + b_idx=b_idx, + ) + else: + # DCTB + self.attn = Adaptive_Channel_Attention( + dim, + num_heads=num_heads, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + attn_drop=attn_drop, + proj_drop=drop, + ) + self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity() + + ffn_hidden_dim = int(dim * expansion_factor) + self.ffn = SGFN( + in_features=dim, + hidden_features=ffn_hidden_dim, + out_features=dim, + act_layer=act_layer, + ) + self.norm2 = norm_layer(dim) + + def forward(self, x, x_size): + """ + Input: x: (B, H*W, C), x_size: (H, W) + Output: x: (B, H*W, C) + """ + H, W = x_size + x = x + 
self.drop_path(self.attn(self.norm1(x), H, W)) + x = x + self.drop_path(self.ffn(self.norm2(x), H, W)) + + return x + + +class ResidualGroup(nn.Module): + """ResidualGroup + Args: + dim (int): Number of input channels. + reso (int): Input resolution. + num_heads (int): Number of attention heads. + split_size (tuple(int)): Height and Width of spatial window. + expansion_factor (float): Ratio of ffn hidden dim to embedding dim. + qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None): Override default qk scale of head_dim ** -0.5 if set. Default: None + drop (float): Dropout rate. Default: 0 + attn_drop(float): Attention dropout rate. Default: 0 + drop_paths (float | None): Stochastic depth rate. + act_layer (nn.Module): Activation layer. Default: nn.GELU + norm_layer (nn.Module): Normalization layer. Default: nn.LayerNorm + depth (int): Number of dual aggregation Transformer blocks in residual group. + use_chk (bool): Whether to use checkpointing to save memory. + resi_connection: The convolutional block before residual connection. '1conv'/'3conv' + """ + + def __init__( + self, + dim, + reso, + num_heads, + split_size=[2, 4], + expansion_factor=4.0, + qkv_bias=False, + qk_scale=None, + drop=0.0, + attn_drop=0.0, + drop_paths=None, + act_layer=nn.GELU, + norm_layer=nn.LayerNorm, + depth=2, + use_chk=False, + resi_connection="1conv", + rg_idx=0, + ): + super().__init__() + self.use_chk = use_chk + self.reso = reso + + self.blocks = nn.ModuleList( + [ + DATB( + dim=dim, + num_heads=num_heads, + reso=reso, + split_size=split_size, + shift_size=[split_size[0] // 2, split_size[1] // 2], + expansion_factor=expansion_factor, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop, + attn_drop=attn_drop, + drop_path=drop_paths[i], + act_layer=act_layer, + norm_layer=norm_layer, + rg_idx=rg_idx, + b_idx=i, + ) + for i in range(depth) + ] + ) + + if resi_connection == "1conv": + self.conv = nn.Conv2d(dim, dim, 3, 1, 1) + elif resi_connection == "3conv": + self.conv = nn.Sequential( + nn.Conv2d(dim, dim // 4, 3, 1, 1), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv2d(dim // 4, dim // 4, 1, 1, 0), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv2d(dim // 4, dim, 3, 1, 1), + ) + + def forward(self, x, x_size): + """ + Input: x: (B, H*W, C), x_size: (H, W) + Output: x: (B, H*W, C) + """ + H, W = x_size + res = x + for blk in self.blocks: + if self.use_chk: + x = checkpoint.checkpoint(blk, x, x_size) + else: + x = blk(x, x_size) + x = rearrange(x, "b (h w) c -> b c h w", h=H, w=W) + x = self.conv(x) + x = rearrange(x, "b c h w -> b (h w) c") + x = res + x + + return x + + +class Upsample(nn.Sequential): + """Upsample module. + Args: + scale (int): Scale factor. Supported scales: 2^n and 3. + num_feat (int): Channel number of intermediate features. + """ + + def __init__(self, scale, num_feat): + m = [] + if (scale & (scale - 1)) == 0: # scale = 2^n + for _ in range(int(math.log(scale, 2))): + m.append(nn.Conv2d(num_feat, 4 * num_feat, 3, 1, 1)) + m.append(nn.PixelShuffle(2)) + elif scale == 3: + m.append(nn.Conv2d(num_feat, 9 * num_feat, 3, 1, 1)) + m.append(nn.PixelShuffle(3)) + else: + raise ValueError( + f"scale {scale} is not supported. " "Supported scales: 2^n and 3." + ) + super(Upsample, self).__init__(*m) + + +class UpsampleOneStep(nn.Sequential): + """UpsampleOneStep module (the difference with Upsample is that it always only has 1conv + 1pixelshuffle) + Used in lightweight SR to save parameters. 
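+
+    Shape sketch (illustrative): a single 3x3 conv maps num_feat channels to
+    (scale**2) * num_out_ch channels, then PixelShuffle(scale) rearranges them
+    into the upscaled image, e.g.
+
+        >>> up = UpsampleOneStep(scale=4, num_feat=64, num_out_ch=3)
+        >>> up(torch.randn(1, 64, 16, 16)).shape
+        torch.Size([1, 3, 64, 64])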
+ + Args: + scale (int): Scale factor. Supported scales: 2^n and 3. + num_feat (int): Channel number of intermediate features. + + """ + + def __init__(self, scale, num_feat, num_out_ch, input_resolution=None): + self.num_feat = num_feat + self.input_resolution = input_resolution + m = [] + m.append(nn.Conv2d(num_feat, (scale**2) * num_out_ch, 3, 1, 1)) + m.append(nn.PixelShuffle(scale)) + super(UpsampleOneStep, self).__init__(*m) + + def flops(self): + h, w = self.input_resolution + flops = h * w * self.num_feat * 3 * 9 + return flops + + +class DAT(nn.Module): + """Dual Aggregation Transformer + Args: + img_size (int): Input image size. Default: 64 + in_chans (int): Number of input image channels. Default: 3 + embed_dim (int): Patch embedding dimension. Default: 180 + depths (tuple(int)): Depth of each residual group (number of DATB in each RG). + split_size (tuple(int)): Height and Width of spatial window. + num_heads (tuple(int)): Number of attention heads in different residual groups. + expansion_factor (float): Ratio of ffn hidden dim to embedding dim. Default: 4 + qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None): Override default qk scale of head_dim ** -0.5 if set. Default: None + drop_rate (float): Dropout rate. Default: 0 + attn_drop_rate (float): Attention dropout rate. Default: 0 + drop_path_rate (float): Stochastic depth rate. Default: 0.1 + act_layer (nn.Module): Activation layer. Default: nn.GELU + norm_layer (nn.Module): Normalization layer. Default: nn.LayerNorm + use_chk (bool): Whether to use checkpointing to save memory. + upscale: Upscale factor. 2/3/4 for image SR + img_range: Image range. 1. or 255. + resi_connection: The convolutional block before residual connection. 
'1conv'/'3conv' + """ + + def __init__(self, state_dict): + super().__init__() + + # defaults + img_size = 64 + in_chans = 3 + embed_dim = 180 + split_size = [2, 4] + depth = [2, 2, 2, 2] + num_heads = [2, 2, 2, 2] + expansion_factor = 4.0 + qkv_bias = True + qk_scale = None + drop_rate = 0.0 + attn_drop_rate = 0.0 + drop_path_rate = 0.1 + act_layer = nn.GELU + norm_layer = nn.LayerNorm + use_chk = False + upscale = 2 + img_range = 1.0 + resi_connection = "1conv" + upsampler = "pixelshuffle" + + self.model_arch = "DAT" + self.sub_type = "SR" + self.state = state_dict + + state_keys = state_dict.keys() + if "conv_before_upsample.0.weight" in state_keys: + if "conv_up1.weight" in state_keys: + upsampler = "nearest+conv" + else: + upsampler = "pixelshuffle" + supports_fp16 = False + elif "upsample.0.weight" in state_keys: + upsampler = "pixelshuffledirect" + else: + upsampler = "" + + num_feat = ( + state_dict.get("conv_before_upsample.0.weight", None).shape[1] + if state_dict.get("conv_before_upsample.weight", None) + else 64 + ) + + num_in_ch = state_dict["conv_first.weight"].shape[1] + in_chans = num_in_ch + if "conv_last.weight" in state_keys: + num_out_ch = state_dict["conv_last.weight"].shape[0] + else: + num_out_ch = num_in_ch + + upscale = 1 + if upsampler == "nearest+conv": + upsample_keys = [ + x for x in state_keys if "conv_up" in x and "bias" not in x + ] + + for upsample_key in upsample_keys: + upscale *= 2 + elif upsampler == "pixelshuffle": + upsample_keys = [ + x + for x in state_keys + if "upsample" in x and "conv" not in x and "bias" not in x + ] + for upsample_key in upsample_keys: + shape = state_dict[upsample_key].shape[0] + upscale *= math.sqrt(shape // num_feat) + upscale = int(upscale) + elif upsampler == "pixelshuffledirect": + upscale = int( + math.sqrt(state_dict["upsample.0.bias"].shape[0] // num_out_ch) + ) + + max_layer_num = 0 + max_block_num = 0 + for key in state_keys: + result = re.match(r"layers.(\d*).blocks.(\d*).norm1.weight", key) + if result: + layer_num, block_num = result.groups() + max_layer_num = max(max_layer_num, int(layer_num)) + max_block_num = max(max_block_num, int(block_num)) + + depth = [max_block_num + 1 for _ in range(max_layer_num + 1)] + + if "layers.0.blocks.1.attn.temperature" in state_keys: + num_heads_num = state_dict["layers.0.blocks.1.attn.temperature"].shape[0] + num_heads = [num_heads_num for _ in range(max_layer_num + 1)] + else: + num_heads = depth + + embed_dim = state_dict["conv_first.weight"].shape[0] + expansion_factor = float( + state_dict["layers.0.blocks.0.ffn.fc1.weight"].shape[0] / embed_dim + ) + + # TODO: could actually count the layers, but this should do + if "layers.0.conv.4.weight" in state_keys: + resi_connection = "3conv" + else: + resi_connection = "1conv" + + if "layers.0.blocks.2.attn.attn_mask_0" in state_keys: + attn_mask_0_x, attn_mask_0_y, attn_mask_0_z = state_dict[ + "layers.0.blocks.2.attn.attn_mask_0" + ].shape + + img_size = int(math.sqrt(attn_mask_0_x * attn_mask_0_y)) + + if "layers.0.blocks.0.attn.attns.0.rpe_biases" in state_keys: + split_sizes = ( + state_dict["layers.0.blocks.0.attn.attns.0.rpe_biases"][-1] + 1 + ) + split_size = [int(x) for x in split_sizes] + + self.in_nc = num_in_ch + self.out_nc = num_out_ch + self.num_feat = num_feat + self.embed_dim = embed_dim + self.num_heads = num_heads + self.depth = depth + self.scale = upscale + self.upsampler = upsampler + self.img_size = img_size + self.img_range = img_range + self.expansion_factor = expansion_factor + self.resi_connection = 
resi_connection + self.split_size = split_size + + self.supports_fp16 = False # Too much weirdness to support this at the moment + self.supports_bfp16 = True + self.min_size_restriction = 16 + + num_in_ch = in_chans + num_out_ch = in_chans + num_feat = 64 + self.img_range = img_range + if in_chans == 3: + rgb_mean = (0.4488, 0.4371, 0.4040) + self.mean = torch.Tensor(rgb_mean).view(1, 3, 1, 1) + else: + self.mean = torch.zeros(1, 1, 1, 1) + self.upscale = upscale + self.upsampler = upsampler + + # ------------------------- 1, Shallow Feature Extraction ------------------------- # + self.conv_first = nn.Conv2d(num_in_ch, embed_dim, 3, 1, 1) + + # ------------------------- 2, Deep Feature Extraction ------------------------- # + self.num_layers = len(depth) + self.use_chk = use_chk + self.num_features = ( + self.embed_dim + ) = embed_dim # num_features for consistency with other models + heads = num_heads + + self.before_RG = nn.Sequential( + Rearrange("b c h w -> b (h w) c"), nn.LayerNorm(embed_dim) + ) + + curr_dim = embed_dim + dpr = [ + x.item() for x in torch.linspace(0, drop_path_rate, np.sum(depth)) + ] # stochastic depth decay rule + + self.layers = nn.ModuleList() + for i in range(self.num_layers): + layer = ResidualGroup( + dim=embed_dim, + num_heads=heads[i], + reso=img_size, + split_size=split_size, + expansion_factor=expansion_factor, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop_rate, + attn_drop=attn_drop_rate, + drop_paths=dpr[sum(depth[:i]) : sum(depth[: i + 1])], + act_layer=act_layer, + norm_layer=norm_layer, + depth=depth[i], + use_chk=use_chk, + resi_connection=resi_connection, + rg_idx=i, + ) + self.layers.append(layer) + + self.norm = norm_layer(curr_dim) + # build the last conv layer in deep feature extraction + if resi_connection == "1conv": + self.conv_after_body = nn.Conv2d(embed_dim, embed_dim, 3, 1, 1) + elif resi_connection == "3conv": + # to save parameters and memory + self.conv_after_body = nn.Sequential( + nn.Conv2d(embed_dim, embed_dim // 4, 3, 1, 1), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv2d(embed_dim // 4, embed_dim // 4, 1, 1, 0), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv2d(embed_dim // 4, embed_dim, 3, 1, 1), + ) + + # ------------------------- 3, Reconstruction ------------------------- # + if self.upsampler == "pixelshuffle": + # for classical SR + self.conv_before_upsample = nn.Sequential( + nn.Conv2d(embed_dim, num_feat, 3, 1, 1), nn.LeakyReLU(inplace=True) + ) + self.upsample = Upsample(upscale, num_feat) + self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1) + elif self.upsampler == "pixelshuffledirect": + # for lightweight SR (to save parameters) + self.upsample = UpsampleOneStep( + upscale, embed_dim, num_out_ch, (img_size, img_size) + ) + + self.apply(self._init_weights) + self.load_state_dict(state_dict, strict=True) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance( + m, (nn.LayerNorm, nn.BatchNorm2d, nn.GroupNorm, nn.InstanceNorm2d) + ): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + def forward_features(self, x): + _, _, H, W = x.shape + x_size = [H, W] + x = self.before_RG(x) + for layer in self.layers: + x = layer(x, x_size) + x = self.norm(x) + x = rearrange(x, "b (h w) c -> b c h w", h=H, w=W) + + return x + + def forward(self, x): + """ + Input: x: (B, C, H, W) + """ + self.mean = self.mean.type_as(x) + x = (x - 
self.mean) * self.img_range + + if self.upsampler == "pixelshuffle": + # for image SR + x = self.conv_first(x) + x = self.conv_after_body(self.forward_features(x)) + x + x = self.conv_before_upsample(x) + x = self.conv_last(self.upsample(x)) + elif self.upsampler == "pixelshuffledirect": + # for lightweight SR + x = self.conv_first(x) + x = self.conv_after_body(self.forward_features(x)) + x + x = self.upsample(x) + + x = x / self.img_range + self.mean + return x diff --git a/ldm_patched/pfn/architecture/HAT.py b/ldm_patched/pfn/architecture/HAT.py new file mode 100644 index 0000000000000000000000000000000000000000..7e12ad0fd47a223c4b2a902d296ebeff3403cffb --- /dev/null +++ b/ldm_patched/pfn/architecture/HAT.py @@ -0,0 +1,1277 @@ +# pylint: skip-file +# HAT from https://github.com/XPixelGroup/HAT/blob/main/hat/archs/hat_arch.py +import math +import re + +import torch +import torch.nn as nn +import torch.nn.functional as F +from einops import rearrange + +from .timm.helpers import to_2tuple +from .timm.weight_init import trunc_normal_ + + +def drop_path(x, drop_prob: float = 0.0, training: bool = False): + """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks). + From: https://github.com/huggingface/pytorch-image-models/blob/main/timm/layers/drop.py + """ + if drop_prob == 0.0 or not training: + return x + keep_prob = 1 - drop_prob + shape = (x.shape[0],) + (1,) * ( + x.ndim - 1 + ) # work with diff dim tensors, not just 2D ConvNets + random_tensor = keep_prob + torch.rand(shape, dtype=x.dtype, device=x.device) + random_tensor.floor_() # binarize + output = x.div(keep_prob) * random_tensor + return output + + +class DropPath(nn.Module): + """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks). + From: https://github.com/huggingface/pytorch-image-models/blob/main/timm/layers/drop.py + """ + + def __init__(self, drop_prob=None): + super(DropPath, self).__init__() + self.drop_prob = drop_prob + + def forward(self, x): + return drop_path(x, self.drop_prob, self.training) # type: ignore + + +class ChannelAttention(nn.Module): + """Channel attention used in RCAN. + Args: + num_feat (int): Channel number of intermediate features. + squeeze_factor (int): Channel squeeze factor. Default: 16. 
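+
+    Example (illustrative; the squeeze-and-excite branch produces a per-channel
+    scale in (0, 1), so the output keeps the input shape):
+        >>> ca = ChannelAttention(num_feat=64, squeeze_factor=16)
+        >>> ca(torch.randn(1, 64, 32, 32)).shape
+        torch.Size([1, 64, 32, 32])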
+ """ + + def __init__(self, num_feat, squeeze_factor=16): + super(ChannelAttention, self).__init__() + self.attention = nn.Sequential( + nn.AdaptiveAvgPool2d(1), + nn.Conv2d(num_feat, num_feat // squeeze_factor, 1, padding=0), + nn.ReLU(inplace=True), + nn.Conv2d(num_feat // squeeze_factor, num_feat, 1, padding=0), + nn.Sigmoid(), + ) + + def forward(self, x): + y = self.attention(x) + return x * y + + +class CAB(nn.Module): + def __init__(self, num_feat, compress_ratio=3, squeeze_factor=30): + super(CAB, self).__init__() + + self.cab = nn.Sequential( + nn.Conv2d(num_feat, num_feat // compress_ratio, 3, 1, 1), + nn.GELU(), + nn.Conv2d(num_feat // compress_ratio, num_feat, 3, 1, 1), + ChannelAttention(num_feat, squeeze_factor), + ) + + def forward(self, x): + return self.cab(x) + + +class Mlp(nn.Module): + def __init__( + self, + in_features, + hidden_features=None, + out_features=None, + act_layer=nn.GELU, + drop=0.0, + ): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +def window_partition(x, window_size): + """ + Args: + x: (b, h, w, c) + window_size (int): window size + Returns: + windows: (num_windows*b, window_size, window_size, c) + """ + b, h, w, c = x.shape + x = x.view(b, h // window_size, window_size, w // window_size, window_size, c) + windows = ( + x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, c) + ) + return windows + + +def window_reverse(windows, window_size, h, w): + """ + Args: + windows: (num_windows*b, window_size, window_size, c) + window_size (int): Window size + h (int): Height of image + w (int): Width of image + Returns: + x: (b, h, w, c) + """ + b = int(windows.shape[0] / (h * w / window_size / window_size)) + x = windows.view( + b, h // window_size, w // window_size, window_size, window_size, -1 + ) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(b, h, w, -1) + return x + + +class WindowAttention(nn.Module): + r"""Window based multi-head self attention (W-MSA) module with relative position bias. + It supports both of shifted and non-shifted window. + Args: + dim (int): Number of input channels. + window_size (tuple[int]): The height and width of the window. + num_heads (int): Number of attention heads. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set + attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0 + proj_drop (float, optional): Dropout ratio of output. 
Default: 0.0 + """ + + def __init__( + self, + dim, + window_size, + num_heads, + qkv_bias=True, + qk_scale=None, + attn_drop=0.0, + proj_drop=0.0, + ): + super().__init__() + self.dim = dim + self.window_size = window_size # Wh, Ww + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = qk_scale or head_dim**-0.5 + + # define a parameter table of relative position bias + self.relative_position_bias_table = nn.Parameter( # type: ignore + torch.zeros((2 * window_size[0] - 1) * (2 * window_size[1] - 1), num_heads) + ) # 2*Wh-1 * 2*Ww-1, nH + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + + self.proj_drop = nn.Dropout(proj_drop) + + trunc_normal_(self.relative_position_bias_table, std=0.02) + self.softmax = nn.Softmax(dim=-1) + + def forward(self, x, rpi, mask=None): + """ + Args: + x: input features with shape of (num_windows*b, n, c) + mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None + """ + b_, n, c = x.shape + qkv = ( + self.qkv(x) + .reshape(b_, n, 3, self.num_heads, c // self.num_heads) + .permute(2, 0, 3, 1, 4) + ) + q, k, v = ( + qkv[0], + qkv[1], + qkv[2], + ) # make torchscript happy (cannot use tensor as tuple) + + q = q * self.scale + attn = q @ k.transpose(-2, -1) + + relative_position_bias = self.relative_position_bias_table[rpi.view(-1)].view( + self.window_size[0] * self.window_size[1], + self.window_size[0] * self.window_size[1], + -1, + ) # Wh*Ww,Wh*Ww,nH + relative_position_bias = relative_position_bias.permute( + 2, 0, 1 + ).contiguous() # nH, Wh*Ww, Wh*Ww + attn = attn + relative_position_bias.unsqueeze(0) + + if mask is not None: + nw = mask.shape[0] + attn = attn.view(b_ // nw, nw, self.num_heads, n, n) + mask.unsqueeze( + 1 + ).unsqueeze(0) + attn = attn.view(-1, self.num_heads, n, n) + attn = self.softmax(attn) + else: + attn = self.softmax(attn) + + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(b_, n, c) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class HAB(nn.Module): + r"""Hybrid Attention Block. + Args: + dim (int): Number of input channels. + input_resolution (tuple[int]): Input resolution. + num_heads (int): Number of attention heads. + window_size (int): Window size. + shift_size (int): Shift size for SW-MSA. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set. + drop (float, optional): Dropout rate. Default: 0.0 + attn_drop (float, optional): Attention dropout rate. Default: 0.0 + drop_path (float, optional): Stochastic depth rate. Default: 0.0 + act_layer (nn.Module, optional): Activation layer. Default: nn.GELU + norm_layer (nn.Module, optional): Normalization layer. 
Default: nn.LayerNorm + """ + + def __init__( + self, + dim, + input_resolution, + num_heads, + window_size=7, + shift_size=0, + compress_ratio=3, + squeeze_factor=30, + conv_scale=0.01, + mlp_ratio=4.0, + qkv_bias=True, + qk_scale=None, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + act_layer=nn.GELU, + norm_layer=nn.LayerNorm, + ): + super().__init__() + self.dim = dim + self.input_resolution = input_resolution + self.num_heads = num_heads + self.window_size = window_size + self.shift_size = shift_size + self.mlp_ratio = mlp_ratio + if min(self.input_resolution) <= self.window_size: + # if window size is larger than input resolution, we don't partition windows + self.shift_size = 0 + self.window_size = min(self.input_resolution) + assert ( + 0 <= self.shift_size < self.window_size + ), "shift_size must in 0-window_size" + + self.norm1 = norm_layer(dim) + self.attn = WindowAttention( + dim, + window_size=to_2tuple(self.window_size), + num_heads=num_heads, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + attn_drop=attn_drop, + proj_drop=drop, + ) + + self.conv_scale = conv_scale + self.conv_block = CAB( + num_feat=dim, compress_ratio=compress_ratio, squeeze_factor=squeeze_factor + ) + + self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop, + ) + + def forward(self, x, x_size, rpi_sa, attn_mask): + h, w = x_size + b, _, c = x.shape + # assert seq_len == h * w, "input feature has wrong size" + + shortcut = x + x = self.norm1(x) + x = x.view(b, h, w, c) + + # Conv_X + conv_x = self.conv_block(x.permute(0, 3, 1, 2)) + conv_x = conv_x.permute(0, 2, 3, 1).contiguous().view(b, h * w, c) + + # cyclic shift + if self.shift_size > 0: + shifted_x = torch.roll( + x, shifts=(-self.shift_size, -self.shift_size), dims=(1, 2) + ) + attn_mask = attn_mask + else: + shifted_x = x + attn_mask = None + + # partition windows + x_windows = window_partition( + shifted_x, self.window_size + ) # nw*b, window_size, window_size, c + x_windows = x_windows.view( + -1, self.window_size * self.window_size, c + ) # nw*b, window_size*window_size, c + + # W-MSA/SW-MSA (to be compatible for testing on images whose shapes are the multiple of window size + attn_windows = self.attn(x_windows, rpi=rpi_sa, mask=attn_mask) + + # merge windows + attn_windows = attn_windows.view(-1, self.window_size, self.window_size, c) + shifted_x = window_reverse(attn_windows, self.window_size, h, w) # b h' w' c + + # reverse cyclic shift + if self.shift_size > 0: + attn_x = torch.roll( + shifted_x, shifts=(self.shift_size, self.shift_size), dims=(1, 2) + ) + else: + attn_x = shifted_x + attn_x = attn_x.view(b, h * w, c) + + # FFN + x = shortcut + self.drop_path(attn_x) + conv_x * self.conv_scale + x = x + self.drop_path(self.mlp(self.norm2(x))) + + return x + + +class PatchMerging(nn.Module): + r"""Patch Merging Layer. + Args: + input_resolution (tuple[int]): Resolution of input feature. + dim (int): Number of input channels. + norm_layer (nn.Module, optional): Normalization layer. 
Default: nn.LayerNorm + """ + + def __init__(self, input_resolution, dim, norm_layer=nn.LayerNorm): + super().__init__() + self.input_resolution = input_resolution + self.dim = dim + self.reduction = nn.Linear(4 * dim, 2 * dim, bias=False) + self.norm = norm_layer(4 * dim) + + def forward(self, x): + """ + x: b, h*w, c + """ + h, w = self.input_resolution + b, seq_len, c = x.shape + assert seq_len == h * w, "input feature has wrong size" + assert h % 2 == 0 and w % 2 == 0, f"x size ({h}*{w}) are not even." + + x = x.view(b, h, w, c) + + x0 = x[:, 0::2, 0::2, :] # b h/2 w/2 c + x1 = x[:, 1::2, 0::2, :] # b h/2 w/2 c + x2 = x[:, 0::2, 1::2, :] # b h/2 w/2 c + x3 = x[:, 1::2, 1::2, :] # b h/2 w/2 c + x = torch.cat([x0, x1, x2, x3], -1) # b h/2 w/2 4*c + x = x.view(b, -1, 4 * c) # b h/2*w/2 4*c + + x = self.norm(x) + x = self.reduction(x) + + return x + + +class OCAB(nn.Module): + # overlapping cross-attention block + + def __init__( + self, + dim, + input_resolution, + window_size, + overlap_ratio, + num_heads, + qkv_bias=True, + qk_scale=None, + mlp_ratio=2, + norm_layer=nn.LayerNorm, + ): + super().__init__() + self.dim = dim + self.input_resolution = input_resolution + self.window_size = window_size + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = qk_scale or head_dim**-0.5 + self.overlap_win_size = int(window_size * overlap_ratio) + window_size + + self.norm1 = norm_layer(dim) + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.unfold = nn.Unfold( + kernel_size=(self.overlap_win_size, self.overlap_win_size), + stride=window_size, + padding=(self.overlap_win_size - window_size) // 2, + ) + + # define a parameter table of relative position bias + self.relative_position_bias_table = nn.Parameter( # type: ignore + torch.zeros( + (window_size + self.overlap_win_size - 1) + * (window_size + self.overlap_win_size - 1), + num_heads, + ) + ) # 2*Wh-1 * 2*Ww-1, nH + + trunc_normal_(self.relative_position_bias_table, std=0.02) + self.softmax = nn.Softmax(dim=-1) + + self.proj = nn.Linear(dim, dim) + + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp( + in_features=dim, hidden_features=mlp_hidden_dim, act_layer=nn.GELU + ) + + def forward(self, x, x_size, rpi): + h, w = x_size + b, _, c = x.shape + + shortcut = x + x = self.norm1(x) + x = x.view(b, h, w, c) + + qkv = self.qkv(x).reshape(b, h, w, 3, c).permute(3, 0, 4, 1, 2) # 3, b, c, h, w + q = qkv[0].permute(0, 2, 3, 1) # b, h, w, c + kv = torch.cat((qkv[1], qkv[2]), dim=1) # b, 2*c, h, w + + # partition windows + q_windows = window_partition( + q, self.window_size + ) # nw*b, window_size, window_size, c + q_windows = q_windows.view( + -1, self.window_size * self.window_size, c + ) # nw*b, window_size*window_size, c + + kv_windows = self.unfold(kv) # b, c*w*w, nw + kv_windows = rearrange( + kv_windows, + "b (nc ch owh oww) nw -> nc (b nw) (owh oww) ch", + nc=2, + ch=c, + owh=self.overlap_win_size, + oww=self.overlap_win_size, + ).contiguous() # 2, nw*b, ow*ow, c + # Do the above rearrangement without the rearrange function + # kv_windows = kv_windows.view( + # 2, b, self.overlap_win_size, self.overlap_win_size, c, -1 + # ) + # kv_windows = kv_windows.permute(0, 5, 1, 2, 3, 4).contiguous() + # kv_windows = kv_windows.view( + # 2, -1, self.overlap_win_size * self.overlap_win_size, c + # ) + + k_windows, v_windows = kv_windows[0], kv_windows[1] # nw*b, ow*ow, c + + b_, nq, _ = q_windows.shape + _, n, _ = k_windows.shape + d = self.dim // self.num_heads + q = q_windows.reshape(b_, nq, 
self.num_heads, d).permute( + 0, 2, 1, 3 + ) # nw*b, nH, nq, d + k = k_windows.reshape(b_, n, self.num_heads, d).permute( + 0, 2, 1, 3 + ) # nw*b, nH, n, d + v = v_windows.reshape(b_, n, self.num_heads, d).permute( + 0, 2, 1, 3 + ) # nw*b, nH, n, d + + q = q * self.scale + attn = q @ k.transpose(-2, -1) + + relative_position_bias = self.relative_position_bias_table[rpi.view(-1)].view( + self.window_size * self.window_size, + self.overlap_win_size * self.overlap_win_size, + -1, + ) # ws*ws, wse*wse, nH + relative_position_bias = relative_position_bias.permute( + 2, 0, 1 + ).contiguous() # nH, ws*ws, wse*wse + attn = attn + relative_position_bias.unsqueeze(0) + + attn = self.softmax(attn) + attn_windows = (attn @ v).transpose(1, 2).reshape(b_, nq, self.dim) + + # merge windows + attn_windows = attn_windows.view( + -1, self.window_size, self.window_size, self.dim + ) + x = window_reverse(attn_windows, self.window_size, h, w) # b h w c + x = x.view(b, h * w, self.dim) + + x = self.proj(x) + shortcut + + x = x + self.mlp(self.norm2(x)) + return x + + +class AttenBlocks(nn.Module): + """A series of attention blocks for one RHAG. + Args: + dim (int): Number of input channels. + input_resolution (tuple[int]): Input resolution. + depth (int): Number of blocks. + num_heads (int): Number of attention heads. + window_size (int): Local window size. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set. + drop (float, optional): Dropout rate. Default: 0.0 + attn_drop (float, optional): Attention dropout rate. Default: 0.0 + drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0 + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None + use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False. 
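+
+    Note: one group runs `depth` HAB blocks (with shift_size alternating between
+    0 and window_size // 2) followed by a single OCAB pass; see `forward` below.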
+ """ + + def __init__( + self, + dim, + input_resolution, + depth, + num_heads, + window_size, + compress_ratio, + squeeze_factor, + conv_scale, + overlap_ratio, + mlp_ratio=4.0, + qkv_bias=True, + qk_scale=None, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + norm_layer=nn.LayerNorm, + downsample=None, + use_checkpoint=False, + ): + super().__init__() + self.dim = dim + self.input_resolution = input_resolution + self.depth = depth + self.use_checkpoint = use_checkpoint + + # build blocks + self.blocks = nn.ModuleList( + [ + HAB( + dim=dim, + input_resolution=input_resolution, + num_heads=num_heads, + window_size=window_size, + shift_size=0 if (i % 2 == 0) else window_size // 2, + compress_ratio=compress_ratio, + squeeze_factor=squeeze_factor, + conv_scale=conv_scale, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop, + attn_drop=attn_drop, + drop_path=drop_path[i] + if isinstance(drop_path, list) + else drop_path, + norm_layer=norm_layer, + ) + for i in range(depth) + ] + ) + + # OCAB + self.overlap_attn = OCAB( + dim=dim, + input_resolution=input_resolution, + window_size=window_size, + overlap_ratio=overlap_ratio, + num_heads=num_heads, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + mlp_ratio=mlp_ratio, # type: ignore + norm_layer=norm_layer, + ) + + # patch merging layer + if downsample is not None: + self.downsample = downsample( + input_resolution, dim=dim, norm_layer=norm_layer + ) + else: + self.downsample = None + + def forward(self, x, x_size, params): + for blk in self.blocks: + x = blk(x, x_size, params["rpi_sa"], params["attn_mask"]) + + x = self.overlap_attn(x, x_size, params["rpi_oca"]) + + if self.downsample is not None: + x = self.downsample(x) + return x + + +class RHAG(nn.Module): + """Residual Hybrid Attention Group (RHAG). + Args: + dim (int): Number of input channels. + input_resolution (tuple[int]): Input resolution. + depth (int): Number of blocks. + num_heads (int): Number of attention heads. + window_size (int): Local window size. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set. + drop (float, optional): Dropout rate. Default: 0.0 + attn_drop (float, optional): Attention dropout rate. Default: 0.0 + drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0 + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None + use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False. + img_size: Input image size. + patch_size: Patch size. + resi_connection: The convolutional block before residual connection. 
+ """ + + def __init__( + self, + dim, + input_resolution, + depth, + num_heads, + window_size, + compress_ratio, + squeeze_factor, + conv_scale, + overlap_ratio, + mlp_ratio=4.0, + qkv_bias=True, + qk_scale=None, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + norm_layer=nn.LayerNorm, + downsample=None, + use_checkpoint=False, + img_size=224, + patch_size=4, + resi_connection="1conv", + ): + super(RHAG, self).__init__() + + self.dim = dim + self.input_resolution = input_resolution + + self.residual_group = AttenBlocks( + dim=dim, + input_resolution=input_resolution, + depth=depth, + num_heads=num_heads, + window_size=window_size, + compress_ratio=compress_ratio, + squeeze_factor=squeeze_factor, + conv_scale=conv_scale, + overlap_ratio=overlap_ratio, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop, + attn_drop=attn_drop, + drop_path=drop_path, + norm_layer=norm_layer, + downsample=downsample, + use_checkpoint=use_checkpoint, + ) + + if resi_connection == "1conv": + self.conv = nn.Conv2d(dim, dim, 3, 1, 1) + elif resi_connection == "identity": + self.conv = nn.Identity() + + self.patch_embed = PatchEmbed( + img_size=img_size, + patch_size=patch_size, + in_chans=0, + embed_dim=dim, + norm_layer=None, + ) + + self.patch_unembed = PatchUnEmbed( + img_size=img_size, + patch_size=patch_size, + in_chans=0, + embed_dim=dim, + norm_layer=None, + ) + + def forward(self, x, x_size, params): + return ( + self.patch_embed( + self.conv( + self.patch_unembed(self.residual_group(x, x_size, params), x_size) + ) + ) + + x + ) + + +class PatchEmbed(nn.Module): + r"""Image to Patch Embedding + Args: + img_size (int): Image size. Default: 224. + patch_size (int): Patch token size. Default: 4. + in_chans (int): Number of input image channels. Default: 3. + embed_dim (int): Number of linear projection output channels. Default: 96. + norm_layer (nn.Module, optional): Normalization layer. Default: None + """ + + def __init__( + self, img_size=224, patch_size=4, in_chans=3, embed_dim=96, norm_layer=None + ): + super().__init__() + img_size = to_2tuple(img_size) + patch_size = to_2tuple(patch_size) + patches_resolution = [ + img_size[0] // patch_size[0], # type: ignore + img_size[1] // patch_size[1], # type: ignore + ] + self.img_size = img_size + self.patch_size = patch_size + self.patches_resolution = patches_resolution + self.num_patches = patches_resolution[0] * patches_resolution[1] + + self.in_chans = in_chans + self.embed_dim = embed_dim + + if norm_layer is not None: + self.norm = norm_layer(embed_dim) + else: + self.norm = None + + def forward(self, x): + x = x.flatten(2).transpose(1, 2) # b Ph*Pw c + if self.norm is not None: + x = self.norm(x) + return x + + +class PatchUnEmbed(nn.Module): + r"""Image to Patch Unembedding + Args: + img_size (int): Image size. Default: 224. + patch_size (int): Patch token size. Default: 4. + in_chans (int): Number of input image channels. Default: 3. + embed_dim (int): Number of linear projection output channels. Default: 96. + norm_layer (nn.Module, optional): Normalization layer. 
Default: None
+    """
+
+    def __init__(
+        self, img_size=224, patch_size=4, in_chans=3, embed_dim=96, norm_layer=None
+    ):
+        super().__init__()
+        img_size = to_2tuple(img_size)
+        patch_size = to_2tuple(patch_size)
+        patches_resolution = [
+            img_size[0] // patch_size[0],  # type: ignore
+            img_size[1] // patch_size[1],  # type: ignore
+        ]
+        self.img_size = img_size
+        self.patch_size = patch_size
+        self.patches_resolution = patches_resolution
+        self.num_patches = patches_resolution[0] * patches_resolution[1]
+
+        self.in_chans = in_chans
+        self.embed_dim = embed_dim
+
+    def forward(self, x, x_size):
+        x = (
+            x.transpose(1, 2)
+            .contiguous()
+            .view(x.shape[0], self.embed_dim, x_size[0], x_size[1])
+        )  # b c h w
+        return x
+
+
+class Upsample(nn.Sequential):
+    """Upsample module.
+    Args:
+        scale (int): Scale factor. Supported scales: 2^n and 3.
+        num_feat (int): Channel number of intermediate features.
+    """
+
+    def __init__(self, scale, num_feat):
+        m = []
+        if (scale & (scale - 1)) == 0:  # scale = 2^n
+            for _ in range(int(math.log(scale, 2))):
+                m.append(nn.Conv2d(num_feat, 4 * num_feat, 3, 1, 1))
+                m.append(nn.PixelShuffle(2))
+        elif scale == 3:
+            m.append(nn.Conv2d(num_feat, 9 * num_feat, 3, 1, 1))
+            m.append(nn.PixelShuffle(3))
+        else:
+            raise ValueError(
+                f"scale {scale} is not supported. " "Supported scales: 2^n and 3."
+            )
+        super(Upsample, self).__init__(*m)
+
+
+class HAT(nn.Module):
+    r"""Hybrid Attention Transformer
+    A PyTorch implementation of: `Activating More Pixels in Image Super-Resolution Transformer`.
+    Some codes are based on SwinIR.
+    Args:
+        img_size (int | tuple(int)): Input image size. Default: 64
+        patch_size (int | tuple(int)): Patch size. Default: 1
+        in_chans (int): Number of input image channels. Default: 3
+        embed_dim (int): Patch embedding dimension. Default: 96
+        depths (tuple(int)): Depth of each Swin Transformer layer.
+        num_heads (tuple(int)): Number of attention heads in different layers.
+        window_size (int): Window size. Default: 7
+        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4
+        qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True
+        qk_scale (float): Override default qk scale of head_dim ** -0.5 if set. Default: None
+        drop_rate (float): Dropout rate. Default: 0
+        attn_drop_rate (float): Attention dropout rate. Default: 0
+        drop_path_rate (float): Stochastic depth rate. Default: 0.1
+        norm_layer (nn.Module): Normalization layer. Default: nn.LayerNorm.
+        ape (bool): If True, add absolute position embedding to the patch embedding. Default: False
+        patch_norm (bool): If True, add normalization after patch embedding. Default: True
+        use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False
+        upscale: Upscale factor. 2/3/4/8 for image SR, 1 for denoising and compression artifact reduction
+        img_range: Image range. 1. or 255.
+        upsampler: The reconstruction module. 'pixelshuffle'/'pixelshuffledirect'/'nearest+conv'/None
+        resi_connection: The convolutional block before residual connection.
'1conv'/'3conv' + """ + + def __init__( + self, + state_dict, + **kwargs, + ): + super(HAT, self).__init__() + + # Defaults + img_size = 64 + patch_size = 1 + in_chans = 3 + embed_dim = 96 + depths = (6, 6, 6, 6) + num_heads = (6, 6, 6, 6) + window_size = 7 + compress_ratio = 3 + squeeze_factor = 30 + conv_scale = 0.01 + overlap_ratio = 0.5 + mlp_ratio = 4.0 + qkv_bias = True + qk_scale = None + drop_rate = 0.0 + attn_drop_rate = 0.0 + drop_path_rate = 0.1 + norm_layer = nn.LayerNorm + ape = False + patch_norm = True + use_checkpoint = False + upscale = 2 + img_range = 1.0 + upsampler = "" + resi_connection = "1conv" + + self.state = state_dict + self.model_arch = "HAT" + self.sub_type = "SR" + self.supports_fp16 = False + self.support_bf16 = True + self.min_size_restriction = 16 + + state_keys = list(state_dict.keys()) + + num_feat = state_dict["conv_last.weight"].shape[1] + in_chans = state_dict["conv_first.weight"].shape[1] + num_out_ch = state_dict["conv_last.weight"].shape[0] + embed_dim = state_dict["conv_first.weight"].shape[0] + + if "conv_before_upsample.0.weight" in state_keys: + if "conv_up1.weight" in state_keys: + upsampler = "nearest+conv" + else: + upsampler = "pixelshuffle" + supports_fp16 = False + elif "upsample.0.weight" in state_keys: + upsampler = "pixelshuffledirect" + else: + upsampler = "" + upscale = 1 + if upsampler == "nearest+conv": + upsample_keys = [ + x for x in state_keys if "conv_up" in x and "bias" not in x + ] + + for upsample_key in upsample_keys: + upscale *= 2 + elif upsampler == "pixelshuffle": + upsample_keys = [ + x + for x in state_keys + if "upsample" in x and "conv" not in x and "bias" not in x + ] + for upsample_key in upsample_keys: + shape = self.state[upsample_key].shape[0] + upscale *= math.sqrt(shape // num_feat) + upscale = int(upscale) + elif upsampler == "pixelshuffledirect": + upscale = int( + math.sqrt(self.state["upsample.0.bias"].shape[0] // num_out_ch) + ) + + max_layer_num = 0 + max_block_num = 0 + for key in state_keys: + result = re.match( + r"layers.(\d*).residual_group.blocks.(\d*).conv_block.cab.0.weight", key + ) + if result: + layer_num, block_num = result.groups() + max_layer_num = max(max_layer_num, int(layer_num)) + max_block_num = max(max_block_num, int(block_num)) + + depths = [max_block_num + 1 for _ in range(max_layer_num + 1)] + + if ( + "layers.0.residual_group.blocks.0.attn.relative_position_bias_table" + in state_keys + ): + num_heads_num = self.state[ + "layers.0.residual_group.blocks.0.attn.relative_position_bias_table" + ].shape[-1] + num_heads = [num_heads_num for _ in range(max_layer_num + 1)] + else: + num_heads = depths + + mlp_ratio = float( + self.state["layers.0.residual_group.blocks.0.mlp.fc1.bias"].shape[0] + / embed_dim + ) + + # TODO: could actually count the layers, but this should do + if "layers.0.conv.4.weight" in state_keys: + resi_connection = "3conv" + else: + resi_connection = "1conv" + + window_size = int(math.sqrt(self.state["relative_position_index_SA"].shape[0])) + + # Not sure if this is needed or used at all anywhere in HAT's config + if "layers.0.residual_group.blocks.1.attn_mask" in state_keys: + img_size = int( + math.sqrt( + self.state["layers.0.residual_group.blocks.1.attn_mask"].shape[0] + ) + * window_size + ) + + self.window_size = window_size + self.shift_size = window_size // 2 + self.overlap_ratio = overlap_ratio + + self.in_nc = in_chans + self.out_nc = num_out_ch + self.num_feat = num_feat + self.embed_dim = embed_dim + self.num_heads = num_heads + self.depths = depths + 
self.window_size = window_size + self.mlp_ratio = mlp_ratio + self.scale = upscale + self.upsampler = upsampler + self.img_size = img_size + self.img_range = img_range + self.resi_connection = resi_connection + + num_in_ch = in_chans + # num_out_ch = in_chans + # num_feat = 64 + self.img_range = img_range + if in_chans == 3: + rgb_mean = (0.4488, 0.4371, 0.4040) + self.mean = torch.Tensor(rgb_mean).view(1, 3, 1, 1) + else: + self.mean = torch.zeros(1, 1, 1, 1) + self.upscale = upscale + self.upsampler = upsampler + + # relative position index + relative_position_index_SA = self.calculate_rpi_sa() + relative_position_index_OCA = self.calculate_rpi_oca() + self.register_buffer("relative_position_index_SA", relative_position_index_SA) + self.register_buffer("relative_position_index_OCA", relative_position_index_OCA) + + # ------------------------- 1, shallow feature extraction ------------------------- # + self.conv_first = nn.Conv2d(num_in_ch, embed_dim, 3, 1, 1) + + # ------------------------- 2, deep feature extraction ------------------------- # + self.num_layers = len(depths) + self.embed_dim = embed_dim + self.ape = ape + self.patch_norm = patch_norm + self.num_features = embed_dim + self.mlp_ratio = mlp_ratio + + # split image into non-overlapping patches + self.patch_embed = PatchEmbed( + img_size=img_size, + patch_size=patch_size, + in_chans=embed_dim, + embed_dim=embed_dim, + norm_layer=norm_layer if self.patch_norm else None, + ) + num_patches = self.patch_embed.num_patches + patches_resolution = self.patch_embed.patches_resolution + self.patches_resolution = patches_resolution + + # merge non-overlapping patches into image + self.patch_unembed = PatchUnEmbed( + img_size=img_size, + patch_size=patch_size, + in_chans=embed_dim, + embed_dim=embed_dim, + norm_layer=norm_layer if self.patch_norm else None, + ) + + # absolute position embedding + if self.ape: + self.absolute_pos_embed = nn.Parameter( # type: ignore[arg-type] + torch.zeros(1, num_patches, embed_dim) + ) + trunc_normal_(self.absolute_pos_embed, std=0.02) + + self.pos_drop = nn.Dropout(p=drop_rate) + + # stochastic depth + dpr = [ + x.item() for x in torch.linspace(0, drop_path_rate, sum(depths)) + ] # stochastic depth decay rule + + # build Residual Hybrid Attention Groups (RHAG) + self.layers = nn.ModuleList() + for i_layer in range(self.num_layers): + layer = RHAG( + dim=embed_dim, + input_resolution=(patches_resolution[0], patches_resolution[1]), + depth=depths[i_layer], + num_heads=num_heads[i_layer], + window_size=window_size, + compress_ratio=compress_ratio, + squeeze_factor=squeeze_factor, + conv_scale=conv_scale, + overlap_ratio=overlap_ratio, + mlp_ratio=self.mlp_ratio, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop_rate, + attn_drop=attn_drop_rate, + drop_path=dpr[ + sum(depths[:i_layer]) : sum(depths[: i_layer + 1]) # type: ignore + ], # no impact on SR results + norm_layer=norm_layer, + downsample=None, + use_checkpoint=use_checkpoint, + img_size=img_size, + patch_size=patch_size, + resi_connection=resi_connection, + ) + self.layers.append(layer) + self.norm = norm_layer(self.num_features) + + # build the last conv layer in deep feature extraction + if resi_connection == "1conv": + self.conv_after_body = nn.Conv2d(embed_dim, embed_dim, 3, 1, 1) + elif resi_connection == "identity": + self.conv_after_body = nn.Identity() + + # ------------------------- 3, high quality image reconstruction ------------------------- # + if self.upsampler == "pixelshuffle": + # for classical SR + self.conv_before_upsample = 
nn.Sequential( + nn.Conv2d(embed_dim, num_feat, 3, 1, 1), nn.LeakyReLU(inplace=True) + ) + self.upsample = Upsample(upscale, num_feat) + self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1) + + self.apply(self._init_weights) + self.load_state_dict(self.state, strict=False) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + def calculate_rpi_sa(self): + # calculate relative position index for SA + coords_h = torch.arange(self.window_size) + coords_w = torch.arange(self.window_size) + coords = torch.stack(torch.meshgrid([coords_h, coords_w])) # 2, Wh, Ww + coords_flatten = torch.flatten(coords, 1) # 2, Wh*Ww + relative_coords = ( + coords_flatten[:, :, None] - coords_flatten[:, None, :] + ) # 2, Wh*Ww, Wh*Ww + relative_coords = relative_coords.permute( + 1, 2, 0 + ).contiguous() # Wh*Ww, Wh*Ww, 2 + relative_coords[:, :, 0] += self.window_size - 1 # shift to start from 0 + relative_coords[:, :, 1] += self.window_size - 1 + relative_coords[:, :, 0] *= 2 * self.window_size - 1 + relative_position_index = relative_coords.sum(-1) # Wh*Ww, Wh*Ww + return relative_position_index + + def calculate_rpi_oca(self): + # calculate relative position index for OCA + window_size_ori = self.window_size + window_size_ext = self.window_size + int(self.overlap_ratio * self.window_size) + + coords_h = torch.arange(window_size_ori) + coords_w = torch.arange(window_size_ori) + coords_ori = torch.stack(torch.meshgrid([coords_h, coords_w])) # 2, ws, ws + coords_ori_flatten = torch.flatten(coords_ori, 1) # 2, ws*ws + + coords_h = torch.arange(window_size_ext) + coords_w = torch.arange(window_size_ext) + coords_ext = torch.stack(torch.meshgrid([coords_h, coords_w])) # 2, wse, wse + coords_ext_flatten = torch.flatten(coords_ext, 1) # 2, wse*wse + + relative_coords = ( + coords_ext_flatten[:, None, :] - coords_ori_flatten[:, :, None] + ) # 2, ws*ws, wse*wse + + relative_coords = relative_coords.permute( + 1, 2, 0 + ).contiguous() # ws*ws, wse*wse, 2 + relative_coords[:, :, 0] += ( + window_size_ori - window_size_ext + 1 + ) # shift to start from 0 + relative_coords[:, :, 1] += window_size_ori - window_size_ext + 1 + + relative_coords[:, :, 0] *= window_size_ori + window_size_ext - 1 + relative_position_index = relative_coords.sum(-1) + return relative_position_index + + def calculate_mask(self, x_size): + # calculate attention mask for SW-MSA + h, w = x_size + img_mask = torch.zeros((1, h, w, 1)) # 1 h w 1 + h_slices = ( + slice(0, -self.window_size), + slice(-self.window_size, -self.shift_size), + slice(-self.shift_size, None), + ) + w_slices = ( + slice(0, -self.window_size), + slice(-self.window_size, -self.shift_size), + slice(-self.shift_size, None), + ) + cnt = 0 + for h in h_slices: + for w in w_slices: + img_mask[:, h, w, :] = cnt + cnt += 1 + + mask_windows = window_partition( + img_mask, self.window_size + ) # nw, window_size, window_size, 1 + mask_windows = mask_windows.view(-1, self.window_size * self.window_size) + attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2) + attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill( + attn_mask == 0, float(0.0) + ) + + return attn_mask + + @torch.jit.ignore # type: ignore + def no_weight_decay(self): + return {"absolute_pos_embed"} + + @torch.jit.ignore # type: ignore + def 
no_weight_decay_keywords(self):
+        return {"relative_position_bias_table"}
+
+    def check_image_size(self, x):
+        _, _, h, w = x.size()
+        mod_pad_h = (self.window_size - h % self.window_size) % self.window_size
+        mod_pad_w = (self.window_size - w % self.window_size) % self.window_size
+        x = F.pad(x, (0, mod_pad_w, 0, mod_pad_h), "reflect")
+        return x
+
+    def forward_features(self, x):
+        x_size = (x.shape[2], x.shape[3])
+
+        # Calculate the attention mask and relative position index in advance to speed up inference.
+        # The original code is very time-consuming for large window sizes.
+        attn_mask = self.calculate_mask(x_size).to(x.device)
+        params = {
+            "attn_mask": attn_mask,
+            "rpi_sa": self.relative_position_index_SA,
+            "rpi_oca": self.relative_position_index_OCA,
+        }
+
+        x = self.patch_embed(x)
+        if self.ape:
+            x = x + self.absolute_pos_embed
+        x = self.pos_drop(x)
+
+        for layer in self.layers:
+            x = layer(x, x_size, params)
+
+        x = self.norm(x)  # b seq_len c
+        x = self.patch_unembed(x, x_size)
+
+        return x
+
+    def forward(self, x):
+        H, W = x.shape[2:]
+        self.mean = self.mean.type_as(x)
+        x = (x - self.mean) * self.img_range
+        x = self.check_image_size(x)
+
+        if self.upsampler == "pixelshuffle":
+            # for classical SR
+            x = self.conv_first(x)
+            x = self.conv_after_body(self.forward_features(x)) + x
+            x = self.conv_before_upsample(x)
+            x = self.conv_last(self.upsample(x))
+
+        x = x / self.img_range + self.mean
+
+        return x[:, :, : H * self.upscale, : W * self.upscale]
diff --git a/ldm_patched/pfn/architecture/LICENSE-DAT b/ldm_patched/pfn/architecture/LICENSE-DAT
new file mode 100644
index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64
--- /dev/null
+++ b/ldm_patched/pfn/architecture/LICENSE-DAT
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/ldm_patched/pfn/architecture/LICENSE-ESRGAN b/ldm_patched/pfn/architecture/LICENSE-ESRGAN new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/ldm_patched/pfn/architecture/LICENSE-ESRGAN @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/ldm_patched/pfn/architecture/LICENSE-HAT b/ldm_patched/pfn/architecture/LICENSE-HAT new file mode 100644 index 0000000000000000000000000000000000000000..003e97e96cbed07d07b5ff15831711181607edb3 --- /dev/null +++ b/ldm_patched/pfn/architecture/LICENSE-HAT @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Xiangyu Chen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/ldm_patched/pfn/architecture/LICENSE-RealESRGAN b/ldm_patched/pfn/architecture/LICENSE-RealESRGAN new file mode 100644 index 0000000000000000000000000000000000000000..552a1eeaf01f4e7077013ed3496600c608f35202 --- /dev/null +++ b/ldm_patched/pfn/architecture/LICENSE-RealESRGAN @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2021, Xintao Wang +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/ldm_patched/pfn/architecture/LICENSE-SCUNet b/ldm_patched/pfn/architecture/LICENSE-SCUNet new file mode 100644 index 0000000000000000000000000000000000000000..ff75c988f3482ab21da41f0d10068108be54ad88 --- /dev/null +++ b/ldm_patched/pfn/architecture/LICENSE-SCUNet @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. 
Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2022 Kai Zhang (cskaizhang@gmail.com, https://cszn.github.io/). All rights reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/ldm_patched/pfn/architecture/LICENSE-SPSR b/ldm_patched/pfn/architecture/LICENSE-SPSR new file mode 100644 index 0000000000000000000000000000000000000000..3245f3f9e4f476ee3a283f41dd0d9db65544c222 --- /dev/null +++ b/ldm_patched/pfn/architecture/LICENSE-SPSR @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2022 BasicSR Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/ldm_patched/pfn/architecture/LICENSE-SwiftSRGAN b/ldm_patched/pfn/architecture/LICENSE-SwiftSRGAN new file mode 100644 index 0000000000000000000000000000000000000000..0e259d42c996742e9e3cba14c677129b2c1b6311 --- /dev/null +++ b/ldm_patched/pfn/architecture/LICENSE-SwiftSRGAN @@ -0,0 +1,121 @@ +Creative Commons Legal Code + +CC0 1.0 Universal + + CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE + LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN + ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS + INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES + REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS + PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM + THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED + HEREUNDER. + +Statement of Purpose + +The laws of most jurisdictions throughout the world automatically confer +exclusive Copyright and Related Rights (defined below) upon the creator +and subsequent owner(s) (each and all, an "owner") of an original work of +authorship and/or a database (each, a "Work"). + +Certain owners wish to permanently relinquish those rights to a Work for +the purpose of contributing to a commons of creative, cultural and +scientific works ("Commons") that the public can reliably and without fear +of later claims of infringement build upon, modify, incorporate in other +works, reuse and redistribute as freely as possible in any form whatsoever +and for any purposes, including without limitation commercial purposes. +These owners may contribute to the Commons to promote the ideal of a free +culture and the further production of creative, cultural and scientific +works, or to gain reputation or greater distribution for their Work in +part through the use and efforts of others. + +For these and/or other purposes and motivations, and without any +expectation of additional consideration or compensation, the person +associating CC0 with a Work (the "Affirmer"), to the extent that he or she +is an owner of Copyright and Related Rights in the Work, voluntarily +elects to apply CC0 to the Work and publicly distribute the Work under its +terms, with knowledge of his or her Copyright and Related Rights in the +Work and the meaning and intended legal effect of CC0 on those rights. + +1. 
Copyright and Related Rights. A Work made available under CC0 may be +protected by copyright and related or neighboring rights ("Copyright and +Related Rights"). Copyright and Related Rights include, but are not +limited to, the following: + + i. the right to reproduce, adapt, distribute, perform, display, + communicate, and translate a Work; + ii. moral rights retained by the original author(s) and/or performer(s); +iii. publicity and privacy rights pertaining to a person's image or + likeness depicted in a Work; + iv. rights protecting against unfair competition in regards to a Work, + subject to the limitations in paragraph 4(a), below; + v. rights protecting the extraction, dissemination, use and reuse of data + in a Work; + vi. database rights (such as those arising under Directive 96/9/EC of the + European Parliament and of the Council of 11 March 1996 on the legal + protection of databases, and under any national implementation + thereof, including any amended or successor version of such + directive); and +vii. other similar, equivalent or corresponding rights throughout the + world based on applicable law or treaty, and any national + implementations thereof. + +2. Waiver. To the greatest extent permitted by, but not in contravention +of, applicable law, Affirmer hereby overtly, fully, permanently, +irrevocably and unconditionally waives, abandons, and surrenders all of +Affirmer's Copyright and Related Rights and associated claims and causes +of action, whether now known or unknown (including existing as well as +future claims and causes of action), in the Work (i) in all territories +worldwide, (ii) for the maximum duration provided by applicable law or +treaty (including future time extensions), (iii) in any current or future +medium and for any number of copies, and (iv) for any purpose whatsoever, +including without limitation commercial, advertising or promotional +purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each +member of the public at large and to the detriment of Affirmer's heirs and +successors, fully intending that such Waiver shall not be subject to +revocation, rescission, cancellation, termination, or any other legal or +equitable action to disrupt the quiet enjoyment of the Work by the public +as contemplated by Affirmer's express Statement of Purpose. + +3. Public License Fallback. Should any part of the Waiver for any reason +be judged legally invalid or ineffective under applicable law, then the +Waiver shall be preserved to the maximum extent permitted taking into +account Affirmer's express Statement of Purpose. In addition, to the +extent the Waiver is so judged Affirmer hereby grants to each affected +person a royalty-free, non transferable, non sublicensable, non exclusive, +irrevocable and unconditional license to exercise Affirmer's Copyright and +Related Rights in the Work (i) in all territories worldwide, (ii) for the +maximum duration provided by applicable law or treaty (including future +time extensions), (iii) in any current or future medium and for any number +of copies, and (iv) for any purpose whatsoever, including without +limitation commercial, advertising or promotional purposes (the +"License"). The License shall be deemed effective as of the date CC0 was +applied by Affirmer to the Work. 
Should any part of the License for any +reason be judged legally invalid or ineffective under applicable law, such +partial invalidity or ineffectiveness shall not invalidate the remainder +of the License, and in such case Affirmer hereby affirms that he or she +will not (i) exercise any of his or her remaining Copyright and Related +Rights in the Work or (ii) assert any associated claims and causes of +action with respect to the Work, in either case contrary to Affirmer's +express Statement of Purpose. + +4. Limitations and Disclaimers. + + a. No trademark or patent rights held by Affirmer are waived, abandoned, + surrendered, licensed or otherwise affected by this document. + b. Affirmer offers the Work as-is and makes no representations or + warranties of any kind concerning the Work, express, implied, + statutory or otherwise, including without limitation warranties of + title, merchantability, fitness for a particular purpose, non + infringement, or the absence of latent or other defects, accuracy, or + the present or absence of errors, whether or not discoverable, all to + the greatest extent permissible under applicable law. + c. Affirmer disclaims responsibility for clearing rights of other persons + that may apply to the Work or any use thereof, including without + limitation any person's Copyright and Related Rights in the Work. + Further, Affirmer disclaims responsibility for obtaining any necessary + consents, permissions or other rights required for any use of the + Work. + d. Affirmer understands and acknowledges that Creative Commons is not a + party to this document and has no duty or obligation with respect to + this CC0 or use of the Work. diff --git a/ldm_patched/pfn/architecture/LICENSE-Swin2SR b/ldm_patched/pfn/architecture/LICENSE-Swin2SR new file mode 100644 index 0000000000000000000000000000000000000000..e5e4ee061a3f3fbad64bc837425716af7fb108f5 --- /dev/null +++ b/ldm_patched/pfn/architecture/LICENSE-Swin2SR @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [2021] [SwinIR Authors] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/ldm_patched/pfn/architecture/LICENSE-SwinIR b/ldm_patched/pfn/architecture/LICENSE-SwinIR new file mode 100644 index 0000000000000000000000000000000000000000..e5e4ee061a3f3fbad64bc837425716af7fb108f5 --- /dev/null +++ b/ldm_patched/pfn/architecture/LICENSE-SwinIR @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [2021] [SwinIR Authors] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/ldm_patched/pfn/architecture/LICENSE-lama b/ldm_patched/pfn/architecture/LICENSE-lama new file mode 100644 index 0000000000000000000000000000000000000000..ca822bb5f62a37a5a73f56a2d563b16dab46c03f --- /dev/null +++ b/ldm_patched/pfn/architecture/LICENSE-lama @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. 
Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [2021] Samsung Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/ldm_patched/pfn/architecture/LaMa.py b/ldm_patched/pfn/architecture/LaMa.py new file mode 100644 index 0000000000000000000000000000000000000000..a781f3e4dda789c06493fcf35a9803ee61efce73 --- /dev/null +++ b/ldm_patched/pfn/architecture/LaMa.py @@ -0,0 +1,694 @@ +# pylint: skip-file +""" +Model adapted from advimman's lama project: https://github.com/advimman/lama +""" + +# Fast Fourier Convolution NeurIPS 2020 +# original implementation https://github.com/pkumivision/FFC/blob/main/model_zoo/ffc.py +# paper https://proceedings.neurips.cc/paper/2020/file/2fd5d41ec6cfab47e32164d5624269b1-Paper.pdf + +from typing import List + +import torch +import torch.nn as nn +import torch.nn.functional as F +from torchvision.transforms.functional import InterpolationMode, rotate + + +class LearnableSpatialTransformWrapper(nn.Module): + def __init__(self, impl, pad_coef=0.5, angle_init_range=80, train_angle=True): + super().__init__() + self.impl = impl + self.angle = torch.rand(1) * angle_init_range + if train_angle: + self.angle = nn.Parameter(self.angle, requires_grad=True) + self.pad_coef = pad_coef + + def forward(self, x): + if torch.is_tensor(x): + return self.inverse_transform(self.impl(self.transform(x)), x) + elif isinstance(x, tuple): + x_trans = tuple(self.transform(elem) for elem in x) + y_trans = self.impl(x_trans) + return tuple( + self.inverse_transform(elem, orig_x) for elem, orig_x in zip(y_trans, x) + ) + else: + raise ValueError(f"Unexpected input type {type(x)}") + + def transform(self, x): + height, width = x.shape[2:] + pad_h, pad_w = int(height * self.pad_coef), int(width * self.pad_coef) + x_padded = F.pad(x, [pad_w, pad_w, pad_h, pad_h], mode="reflect") + x_padded_rotated = rotate( + x_padded, self.angle.to(x_padded), InterpolationMode.BILINEAR, fill=0 + ) + + return x_padded_rotated + + def inverse_transform(self, y_padded_rotated, orig_x): + height, width = orig_x.shape[2:] + pad_h, pad_w = int(height * self.pad_coef), int(width * self.pad_coef) + + y_padded = rotate( + y_padded_rotated, + -self.angle.to(y_padded_rotated), + InterpolationMode.BILINEAR, + fill=0, + ) + y_height, y_width = y_padded.shape[2:] + y = y_padded[:, :, pad_h : y_height - pad_h, pad_w : y_width - pad_w] + return y + + +class SELayer(nn.Module): + def __init__(self, channel, reduction=16): + super(SELayer, self).__init__() + self.avg_pool = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Sequential( + nn.Linear(channel, channel // reduction, bias=False), + nn.ReLU(inplace=True), + nn.Linear(channel // reduction, channel, bias=False), + nn.Sigmoid(), + ) + + def forward(self, x): + b, c, _, _ = x.size() + y = self.avg_pool(x).view(b, c) + y = self.fc(y).view(b, c, 1, 1) + res = x * y.expand_as(x) + return res + + +class FourierUnit(nn.Module): + def __init__( + self, + in_channels, + out_channels, + groups=1, + spatial_scale_factor=None, + spatial_scale_mode="bilinear", + spectral_pos_encoding=False, + use_se=False, + se_kwargs=None, + ffc3d=False, + fft_norm="ortho", + ): + # bn_layer not used + super(FourierUnit, self).__init__() + self.groups = groups + + self.conv_layer = torch.nn.Conv2d( + 
in_channels=in_channels * 2 + (2 if spectral_pos_encoding else 0), + out_channels=out_channels * 2, + kernel_size=1, + stride=1, + padding=0, + groups=self.groups, + bias=False, + ) + self.bn = torch.nn.BatchNorm2d(out_channels * 2) + self.relu = torch.nn.ReLU(inplace=True) + + # squeeze and excitation block + self.use_se = use_se + if use_se: + if se_kwargs is None: + se_kwargs = {} + self.se = SELayer(self.conv_layer.in_channels, **se_kwargs) + + self.spatial_scale_factor = spatial_scale_factor + self.spatial_scale_mode = spatial_scale_mode + self.spectral_pos_encoding = spectral_pos_encoding + self.ffc3d = ffc3d + self.fft_norm = fft_norm + + def forward(self, x): + half_check = False + if x.type() == "torch.cuda.HalfTensor": + # half only works on gpu anyway + half_check = True + + batch = x.shape[0] + + if self.spatial_scale_factor is not None: + orig_size = x.shape[-2:] + x = F.interpolate( + x, + scale_factor=self.spatial_scale_factor, + mode=self.spatial_scale_mode, + align_corners=False, + ) + + # (batch, c, h, w/2+1, 2) + fft_dim = (-3, -2, -1) if self.ffc3d else (-2, -1) + if half_check == True: + ffted = torch.fft.rfftn( + x.float(), dim=fft_dim, norm=self.fft_norm + ) # .type(torch.cuda.HalfTensor) + else: + ffted = torch.fft.rfftn(x, dim=fft_dim, norm=self.fft_norm) + + ffted = torch.stack((ffted.real, ffted.imag), dim=-1) + ffted = ffted.permute(0, 1, 4, 2, 3).contiguous() # (batch, c, 2, h, w/2+1) + ffted = ffted.view( + ( + batch, + -1, + ) + + ffted.size()[3:] + ) + + if self.spectral_pos_encoding: + height, width = ffted.shape[-2:] + coords_vert = ( + torch.linspace(0, 1, height)[None, None, :, None] + .expand(batch, 1, height, width) + .to(ffted) + ) + coords_hor = ( + torch.linspace(0, 1, width)[None, None, None, :] + .expand(batch, 1, height, width) + .to(ffted) + ) + ffted = torch.cat((coords_vert, coords_hor, ffted), dim=1) + + if self.use_se: + ffted = self.se(ffted) + + if half_check == True: + ffted = self.conv_layer(ffted.half()) # (batch, c*2, h, w/2+1) + else: + ffted = self.conv_layer( + ffted + ) # .type(torch.cuda.FloatTensor) # (batch, c*2, h, w/2+1) + + ffted = self.relu(self.bn(ffted)) + # forcing to be always float + ffted = ffted.float() + + ffted = ( + ffted.view( + ( + batch, + -1, + 2, + ) + + ffted.size()[2:] + ) + .permute(0, 1, 3, 4, 2) + .contiguous() + ) # (batch,c, t, h, w/2+1, 2) + + ffted = torch.complex(ffted[..., 0], ffted[..., 1]) + + ifft_shape_slice = x.shape[-3:] if self.ffc3d else x.shape[-2:] + output = torch.fft.irfftn( + ffted, s=ifft_shape_slice, dim=fft_dim, norm=self.fft_norm + ) + + if half_check == True: + output = output.half() + + if self.spatial_scale_factor is not None: + output = F.interpolate( + output, + size=orig_size, + mode=self.spatial_scale_mode, + align_corners=False, + ) + + return output + + +class SpectralTransform(nn.Module): + def __init__( + self, + in_channels, + out_channels, + stride=1, + groups=1, + enable_lfu=True, + separable_fu=False, + **fu_kwargs, + ): + # bn_layer not used + super(SpectralTransform, self).__init__() + self.enable_lfu = enable_lfu + if stride == 2: + self.downsample = nn.AvgPool2d(kernel_size=(2, 2), stride=2) + else: + self.downsample = nn.Identity() + + self.stride = stride + self.conv1 = nn.Sequential( + nn.Conv2d( + in_channels, out_channels // 2, kernel_size=1, groups=groups, bias=False + ), + nn.BatchNorm2d(out_channels // 2), + nn.ReLU(inplace=True), + ) + fu_class = FourierUnit + self.fu = fu_class(out_channels // 2, out_channels // 2, groups, **fu_kwargs) + if 
self.enable_lfu: + self.lfu = fu_class(out_channels // 2, out_channels // 2, groups) + self.conv2 = torch.nn.Conv2d( + out_channels // 2, out_channels, kernel_size=1, groups=groups, bias=False + ) + + def forward(self, x): + x = self.downsample(x) + x = self.conv1(x) + output = self.fu(x) + + if self.enable_lfu: + _, c, h, _ = x.shape + split_no = 2 + split_s = h // split_no + xs = torch.cat( + torch.split(x[:, : c // 4], split_s, dim=-2), dim=1 + ).contiguous() + xs = torch.cat(torch.split(xs, split_s, dim=-1), dim=1).contiguous() + xs = self.lfu(xs) + xs = xs.repeat(1, 1, split_no, split_no).contiguous() + else: + xs = 0 + + output = self.conv2(x + output + xs) + + return output + + +class FFC(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + ratio_gin, + ratio_gout, + stride=1, + padding=0, + dilation=1, + groups=1, + bias=False, + enable_lfu=True, + padding_type="reflect", + gated=False, + **spectral_kwargs, + ): + super(FFC, self).__init__() + + assert stride == 1 or stride == 2, "Stride should be 1 or 2." + self.stride = stride + + in_cg = int(in_channels * ratio_gin) + in_cl = in_channels - in_cg + out_cg = int(out_channels * ratio_gout) + out_cl = out_channels - out_cg + # groups_g = 1 if groups == 1 else int(groups * ratio_gout) + # groups_l = 1 if groups == 1 else groups - groups_g + + self.ratio_gin = ratio_gin + self.ratio_gout = ratio_gout + self.global_in_num = in_cg + + module = nn.Identity if in_cl == 0 or out_cl == 0 else nn.Conv2d + self.convl2l = module( + in_cl, + out_cl, + kernel_size, + stride, + padding, + dilation, + groups, + bias, + padding_mode=padding_type, + ) + module = nn.Identity if in_cl == 0 or out_cg == 0 else nn.Conv2d + self.convl2g = module( + in_cl, + out_cg, + kernel_size, + stride, + padding, + dilation, + groups, + bias, + padding_mode=padding_type, + ) + module = nn.Identity if in_cg == 0 or out_cl == 0 else nn.Conv2d + self.convg2l = module( + in_cg, + out_cl, + kernel_size, + stride, + padding, + dilation, + groups, + bias, + padding_mode=padding_type, + ) + module = nn.Identity if in_cg == 0 or out_cg == 0 else SpectralTransform + self.convg2g = module( + in_cg, + out_cg, + stride, + 1 if groups == 1 else groups // 2, + enable_lfu, + **spectral_kwargs, + ) + + self.gated = gated + module = ( + nn.Identity if in_cg == 0 or out_cl == 0 or not self.gated else nn.Conv2d + ) + self.gate = module(in_channels, 2, 1) + + def forward(self, x): + x_l, x_g = x if type(x) is tuple else (x, 0) + out_xl, out_xg = 0, 0 + + if self.gated: + total_input_parts = [x_l] + if torch.is_tensor(x_g): + total_input_parts.append(x_g) + total_input = torch.cat(total_input_parts, dim=1) + + gates = torch.sigmoid(self.gate(total_input)) + g2l_gate, l2g_gate = gates.chunk(2, dim=1) + else: + g2l_gate, l2g_gate = 1, 1 + + if self.ratio_gout != 1: + out_xl = self.convl2l(x_l) + self.convg2l(x_g) * g2l_gate + if self.ratio_gout != 0: + out_xg = self.convl2g(x_l) * l2g_gate + self.convg2g(x_g) + + return out_xl, out_xg + + +class FFC_BN_ACT(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + ratio_gin, + ratio_gout, + stride=1, + padding=0, + dilation=1, + groups=1, + bias=False, + norm_layer=nn.BatchNorm2d, + activation_layer=nn.Identity, + padding_type="reflect", + enable_lfu=True, + **kwargs, + ): + super(FFC_BN_ACT, self).__init__() + self.ffc = FFC( + in_channels, + out_channels, + kernel_size, + ratio_gin, + ratio_gout, + stride, + padding, + dilation, + groups, + bias, + enable_lfu, + 
padding_type=padding_type, + **kwargs, + ) + lnorm = nn.Identity if ratio_gout == 1 else norm_layer + gnorm = nn.Identity if ratio_gout == 0 else norm_layer + global_channels = int(out_channels * ratio_gout) + self.bn_l = lnorm(out_channels - global_channels) + self.bn_g = gnorm(global_channels) + + lact = nn.Identity if ratio_gout == 1 else activation_layer + gact = nn.Identity if ratio_gout == 0 else activation_layer + self.act_l = lact(inplace=True) + self.act_g = gact(inplace=True) + + def forward(self, x): + x_l, x_g = self.ffc(x) + x_l = self.act_l(self.bn_l(x_l)) + x_g = self.act_g(self.bn_g(x_g)) + return x_l, x_g + + +class FFCResnetBlock(nn.Module): + def __init__( + self, + dim, + padding_type, + norm_layer, + activation_layer=nn.ReLU, + dilation=1, + spatial_transform_kwargs=None, + inline=False, + **conv_kwargs, + ): + super().__init__() + self.conv1 = FFC_BN_ACT( + dim, + dim, + kernel_size=3, + padding=dilation, + dilation=dilation, + norm_layer=norm_layer, + activation_layer=activation_layer, + padding_type=padding_type, + **conv_kwargs, + ) + self.conv2 = FFC_BN_ACT( + dim, + dim, + kernel_size=3, + padding=dilation, + dilation=dilation, + norm_layer=norm_layer, + activation_layer=activation_layer, + padding_type=padding_type, + **conv_kwargs, + ) + if spatial_transform_kwargs is not None: + self.conv1 = LearnableSpatialTransformWrapper( + self.conv1, **spatial_transform_kwargs + ) + self.conv2 = LearnableSpatialTransformWrapper( + self.conv2, **spatial_transform_kwargs + ) + self.inline = inline + + def forward(self, x): + if self.inline: + x_l, x_g = ( + x[:, : -self.conv1.ffc.global_in_num], + x[:, -self.conv1.ffc.global_in_num :], + ) + else: + x_l, x_g = x if type(x) is tuple else (x, 0) + + id_l, id_g = x_l, x_g + + x_l, x_g = self.conv1((x_l, x_g)) + x_l, x_g = self.conv2((x_l, x_g)) + + x_l, x_g = id_l + x_l, id_g + x_g + out = x_l, x_g + if self.inline: + out = torch.cat(out, dim=1) + return out + + +class ConcatTupleLayer(nn.Module): + def forward(self, x): + assert isinstance(x, tuple) + x_l, x_g = x + assert torch.is_tensor(x_l) or torch.is_tensor(x_g) + if not torch.is_tensor(x_g): + return x_l + return torch.cat(x, dim=1) + + +class FFCResNetGenerator(nn.Module): + def __init__( + self, + input_nc, + output_nc, + ngf=64, + n_downsampling=3, + n_blocks=18, + norm_layer=nn.BatchNorm2d, + padding_type="reflect", + activation_layer=nn.ReLU, + up_norm_layer=nn.BatchNorm2d, + up_activation=nn.ReLU(True), + init_conv_kwargs={}, + downsample_conv_kwargs={}, + resnet_conv_kwargs={}, + spatial_transform_layers=None, + spatial_transform_kwargs={}, + max_features=1024, + out_ffc=False, + out_ffc_kwargs={}, + ): + assert n_blocks >= 0 + super().__init__() + """ + init_conv_kwargs = {'ratio_gin': 0, 'ratio_gout': 0, 'enable_lfu': False} + downsample_conv_kwargs = {'ratio_gin': '${generator.init_conv_kwargs.ratio_gout}', 'ratio_gout': '${generator.downsample_conv_kwargs.ratio_gin}', 'enable_lfu': False} + resnet_conv_kwargs = {'ratio_gin': 0.75, 'ratio_gout': '${generator.resnet_conv_kwargs.ratio_gin}', 'enable_lfu': False} + spatial_transform_kwargs = {} + out_ffc_kwargs = {} + """ + """ + print(input_nc, output_nc, ngf, n_downsampling, n_blocks, norm_layer, + padding_type, activation_layer, + up_norm_layer, up_activation, + spatial_transform_layers, + add_out_act, max_features, out_ffc, file=sys.stderr) + + 4 3 64 3 18 + reflect + + ReLU(inplace=True) + None sigmoid 1024 False + """ + init_conv_kwargs = {"ratio_gin": 0, "ratio_gout": 0, "enable_lfu": False} + 
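# These hard-coded dicts pin the published LaMa "big" generator config,
+        # mirroring the commented-out OmegaConf interpolations above: the stem and
+        # downsampling convs stay fully local (ratio_gin = ratio_gout = 0), except
+        # that the last downsample's ratio_gout is raised in the loop below to the
+        # 0.75 global (spectral-branch) ratio used by the resnet blocks.
+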
downsample_conv_kwargs = {"ratio_gin": 0, "ratio_gout": 0, "enable_lfu": False} + resnet_conv_kwargs = { + "ratio_gin": 0.75, + "ratio_gout": 0.75, + "enable_lfu": False, + } + spatial_transform_kwargs = {} + out_ffc_kwargs = {} + + model = [ + nn.ReflectionPad2d(3), + FFC_BN_ACT( + input_nc, + ngf, + kernel_size=7, + padding=0, + norm_layer=norm_layer, + activation_layer=activation_layer, + **init_conv_kwargs, + ), + ] + + ### downsample + for i in range(n_downsampling): + mult = 2**i + if i == n_downsampling - 1: + cur_conv_kwargs = dict(downsample_conv_kwargs) + cur_conv_kwargs["ratio_gout"] = resnet_conv_kwargs.get("ratio_gin", 0) + else: + cur_conv_kwargs = downsample_conv_kwargs + model += [ + FFC_BN_ACT( + min(max_features, ngf * mult), + min(max_features, ngf * mult * 2), + kernel_size=3, + stride=2, + padding=1, + norm_layer=norm_layer, + activation_layer=activation_layer, + **cur_conv_kwargs, + ) + ] + + mult = 2**n_downsampling + feats_num_bottleneck = min(max_features, ngf * mult) + + ### resnet blocks + for i in range(n_blocks): + cur_resblock = FFCResnetBlock( + feats_num_bottleneck, + padding_type=padding_type, + activation_layer=activation_layer, + norm_layer=norm_layer, + **resnet_conv_kwargs, + ) + if spatial_transform_layers is not None and i in spatial_transform_layers: + cur_resblock = LearnableSpatialTransformWrapper( + cur_resblock, **spatial_transform_kwargs + ) + model += [cur_resblock] + + model += [ConcatTupleLayer()] + + ### upsample + for i in range(n_downsampling): + mult = 2 ** (n_downsampling - i) + model += [ + nn.ConvTranspose2d( + min(max_features, ngf * mult), + min(max_features, int(ngf * mult / 2)), + kernel_size=3, + stride=2, + padding=1, + output_padding=1, + ), + up_norm_layer(min(max_features, int(ngf * mult / 2))), + up_activation, + ] + + if out_ffc: + model += [ + FFCResnetBlock( + ngf, + padding_type=padding_type, + activation_layer=activation_layer, + norm_layer=norm_layer, + inline=True, + **out_ffc_kwargs, + ) + ] + + model += [ + nn.ReflectionPad2d(3), + nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0), + ] + model.append(nn.Sigmoid()) + self.model = nn.Sequential(*model) + + def forward(self, image, mask): + return self.model(torch.cat([image, mask], dim=1)) + + +class LaMa(nn.Module): + def __init__(self, state_dict) -> None: + super(LaMa, self).__init__() + self.model_arch = "LaMa" + self.sub_type = "Inpaint" + self.in_nc = 4 + self.out_nc = 3 + self.scale = 1 + + self.min_size = None + self.pad_mod = 8 + self.pad_to_square = False + + self.model = FFCResNetGenerator(self.in_nc, self.out_nc) + self.state = { + k.replace("generator.model", "model.model"): v + for k, v in state_dict.items() + } + + self.supports_fp16 = False + self.support_bf16 = True + + self.load_state_dict(self.state, strict=False) + + def forward(self, img, mask): + masked_img = img * (1 - mask) + inpainted_mask = mask * self.model.forward(masked_img, mask) + result = inpainted_mask + (1 - mask) * img + return result diff --git a/ldm_patched/pfn/architecture/OmniSR/ChannelAttention.py b/ldm_patched/pfn/architecture/OmniSR/ChannelAttention.py new file mode 100644 index 0000000000000000000000000000000000000000..f4d52aa1e063d274b7aec7bd1ace77b19eb2ca61 --- /dev/null +++ b/ldm_patched/pfn/architecture/OmniSR/ChannelAttention.py @@ -0,0 +1,110 @@ +import math + +import torch.nn as nn + + +class CA_layer(nn.Module): + def __init__(self, channel, reduction=16): + super(CA_layer, self).__init__() + # global average pooling + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = 
nn.Sequential( + nn.Conv2d(channel, channel // reduction, kernel_size=(1, 1), bias=False), + nn.GELU(), + nn.Conv2d(channel // reduction, channel, kernel_size=(1, 1), bias=False), + # nn.Sigmoid() + ) + + def forward(self, x): + y = self.fc(self.gap(x)) + return x * y.expand_as(x) + + +class Simple_CA_layer(nn.Module): + def __init__(self, channel): + super(Simple_CA_layer, self).__init__() + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Conv2d( + in_channels=channel, + out_channels=channel, + kernel_size=1, + padding=0, + stride=1, + groups=1, + bias=True, + ) + + def forward(self, x): + return x * self.fc(self.gap(x)) + + +class ECA_layer(nn.Module): + """Constructs a ECA module. + Args: + channel: Number of channels of the input feature map + k_size: Adaptive selection of kernel size + """ + + def __init__(self, channel): + super(ECA_layer, self).__init__() + + b = 1 + gamma = 2 + k_size = int(abs(math.log(channel, 2) + b) / gamma) + k_size = k_size if k_size % 2 else k_size + 1 + self.avg_pool = nn.AdaptiveAvgPool2d(1) + self.conv = nn.Conv1d( + 1, 1, kernel_size=k_size, padding=(k_size - 1) // 2, bias=False + ) + # self.sigmoid = nn.Sigmoid() + + def forward(self, x): + # x: input features with shape [b, c, h, w] + # b, c, h, w = x.size() + + # feature descriptor on the global spatial information + y = self.avg_pool(x) + + # Two different branches of ECA module + y = self.conv(y.squeeze(-1).transpose(-1, -2)).transpose(-1, -2).unsqueeze(-1) + + # Multi-scale information fusion + # y = self.sigmoid(y) + + return x * y.expand_as(x) + + +class ECA_MaxPool_layer(nn.Module): + """Constructs a ECA module. + Args: + channel: Number of channels of the input feature map + k_size: Adaptive selection of kernel size + """ + + def __init__(self, channel): + super(ECA_MaxPool_layer, self).__init__() + + b = 1 + gamma = 2 + k_size = int(abs(math.log(channel, 2) + b) / gamma) + k_size = k_size if k_size % 2 else k_size + 1 + self.max_pool = nn.AdaptiveMaxPool2d(1) + self.conv = nn.Conv1d( + 1, 1, kernel_size=k_size, padding=(k_size - 1) // 2, bias=False + ) + # self.sigmoid = nn.Sigmoid() + + def forward(self, x): + # x: input features with shape [b, c, h, w] + # b, c, h, w = x.size() + + # feature descriptor on the global spatial information + y = self.max_pool(x) + + # Two different branches of ECA module + y = self.conv(y.squeeze(-1).transpose(-1, -2)).transpose(-1, -2).unsqueeze(-1) + + # Multi-scale information fusion + # y = self.sigmoid(y) + + return x * y.expand_as(x) diff --git a/ldm_patched/pfn/architecture/OmniSR/LICENSE b/ldm_patched/pfn/architecture/OmniSR/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/ldm_patched/pfn/architecture/OmniSR/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
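For orientation before the OmniSR attention code below: the core trick of the
FourierUnit in LaMa.py above is to convolve in the frequency domain: take a real
FFT, treat the real and imaginary parts as extra channels, apply a pointwise
convolution, and invert. A minimal sketch of that idea follows; it is our
simplification, concatenating real/imag channels and omitting the BN/ReLU, SE,
and spectral positional-encoding options of the real class:

    import torch
    import torch.nn.functional as F

    def fourier_unit(x: torch.Tensor, weight: torch.Tensor) -> torch.Tensor:
        # x: (B, C, H, W); weight: (2C, 2C, 1, 1) pointwise kernel
        f = torch.fft.rfftn(x, dim=(-2, -1), norm="ortho")  # (B, C, H, W//2+1), complex
        f = torch.cat((f.real, f.imag), dim=1)              # (B, 2C, H, W//2+1), real
        f = F.conv2d(f, weight)                             # mix spectra channel-wise
        real, imag = f.chunk(2, dim=1)
        return torch.fft.irfftn(
            torch.complex(real, imag), s=x.shape[-2:], dim=(-2, -1), norm="ortho"
        )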
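The LaMa wrapper at the end of LaMa.py above composes its output as
mask * model(masked_img, mask) + (1 - mask) * img, so only the hole region is
replaced. A minimal usage sketch under stated assumptions: the checkpoint
filename is hypothetical, and the inputs are already sized to a multiple of
pad_mod = 8:

    import torch
    from ldm_patched.pfn.architecture.LaMa import LaMa

    state = torch.load("big-lama.pt", map_location="cpu")  # hypothetical checkpoint
    model = LaMa(state).eval()

    img = torch.rand(1, 3, 512, 512)                   # RGB image in [0, 1]
    mask = (torch.rand(1, 1, 512, 512) > 0.9).float()  # 1 = hole to inpaint

    with torch.no_grad():
        result = model(img, mask)                      # (1, 3, 512, 512)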
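Similarly, the ECA layers in ChannelAttention.py above pick their 1-D conv kernel
size adaptively from the channel count: k = int(|log2(C) + b| / gamma), bumped to
the next odd number when even. A quick worked check (the helper name is ours, not
part of the file):

    import math

    def eca_kernel_size(channel: int, b: int = 1, gamma: int = 2) -> int:
        # Mirrors ECA_layer.__init__: truncate, then force an odd kernel size.
        k = int(abs(math.log(channel, 2) + b) / gamma)
        return k if k % 2 else k + 1

    assert eca_kernel_size(64) == 3
    assert eca_kernel_size(128) == 5
    assert eca_kernel_size(512) == 5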
diff --git a/ldm_patched/pfn/architecture/OmniSR/OSA.py b/ldm_patched/pfn/architecture/OmniSR/OSA.py new file mode 100644 index 0000000000000000000000000000000000000000..d7a129696b254b022fa6fc54dc85befcc19ffc2c --- /dev/null +++ b/ldm_patched/pfn/architecture/OmniSR/OSA.py @@ -0,0 +1,577 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +############################################################# +# File: OSA.py +# Created Date: Tuesday April 28th 2022 +# Author: Chen Xuanhong +# Email: chenxuanhongzju@outlook.com +# Last Modified: Sunday, 23rd April 2023 3:07:42 pm +# Modified By: Chen Xuanhong +# Copyright (c) 2020 Shanghai Jiao Tong University +############################################################# + +import torch +import torch.nn.functional as F +from einops import rearrange, repeat +from einops.layers.torch import Rearrange, Reduce +from torch import einsum, nn + +from .layernorm import LayerNorm2d + +# helpers + + +def exists(val): + return val is not None + + +def default(val, d): + return val if exists(val) else d + + +def cast_tuple(val, length=1): + return val if isinstance(val, tuple) else ((val,) * length) + + +# helper classes + + +class PreNormResidual(nn.Module): + def __init__(self, dim, fn): + super().__init__() + self.norm = nn.LayerNorm(dim) + self.fn = fn + + def forward(self, x): + return self.fn(self.norm(x)) + x + + +class Conv_PreNormResidual(nn.Module): + def __init__(self, dim, fn): + super().__init__() + self.norm = LayerNorm2d(dim) + self.fn = fn + + def forward(self, x): + return self.fn(self.norm(x)) + x + + +class FeedForward(nn.Module): + def __init__(self, dim, mult=2, dropout=0.0): + super().__init__() + inner_dim = int(dim * mult) + self.net = nn.Sequential( + nn.Linear(dim, inner_dim), + nn.GELU(), + nn.Dropout(dropout), + nn.Linear(inner_dim, dim), + nn.Dropout(dropout), + ) + + def forward(self, x): + return self.net(x) + + +class Conv_FeedForward(nn.Module): + def __init__(self, dim, mult=2, dropout=0.0): + super().__init__() + inner_dim = int(dim * mult) + self.net = nn.Sequential( + nn.Conv2d(dim, inner_dim, 1, 1, 0), + nn.GELU(), + nn.Dropout(dropout), + nn.Conv2d(inner_dim, dim, 1, 1, 0), + nn.Dropout(dropout), + ) + + def forward(self, x): + return self.net(x) + + +class Gated_Conv_FeedForward(nn.Module): + def __init__(self, dim, mult=1, bias=False, dropout=0.0): + super().__init__() + + hidden_features = int(dim * mult) + + self.project_in = nn.Conv2d(dim, hidden_features * 2, kernel_size=1, bias=bias) + + self.dwconv = nn.Conv2d( + hidden_features * 2, + hidden_features * 2, + kernel_size=3, + stride=1, + padding=1, + groups=hidden_features * 2, + bias=bias, + ) + + self.project_out = nn.Conv2d(hidden_features, dim, kernel_size=1, bias=bias) + + def forward(self, x): + x = self.project_in(x) + x1, x2 = self.dwconv(x).chunk(2, dim=1) + x = F.gelu(x1) * x2 + x = self.project_out(x) + return x + + +# MBConv + + +class SqueezeExcitation(nn.Module): + def __init__(self, dim, shrinkage_rate=0.25): + super().__init__() + hidden_dim = int(dim * shrinkage_rate) + + self.gate = nn.Sequential( + Reduce("b c h w -> b c", "mean"), + nn.Linear(dim, hidden_dim, bias=False), + nn.SiLU(), + nn.Linear(hidden_dim, dim, bias=False), + nn.Sigmoid(), + Rearrange("b c -> b c 1 1"), + ) + + def forward(self, x): + return x * self.gate(x) + + +class MBConvResidual(nn.Module): + def __init__(self, fn, dropout=0.0): + super().__init__() + self.fn = fn + self.dropsample = Dropsample(dropout) + + def forward(self, x): + out = self.fn(x) + out = 
self.dropsample(out) + return out + x + + + class Dropsample(nn.Module): + def __init__(self, prob=0): + super().__init__() + self.prob = prob + + def forward(self, x): + device = x.device + + if self.prob == 0.0 or (not self.training): + return x + + # draw a per-sample keep mask of shape (b, 1, 1, 1) on the input's device + # (torch.FloatTensor((b, 1, 1, 1), device=...) built a 1-D tensor from the tuple and rejected the device kwarg) + keep_mask = ( + torch.empty(x.shape[0], 1, 1, 1, device=device).uniform_() + > self.prob + ) + return x * keep_mask / (1 - self.prob) + + + def MBConv( + dim_in, dim_out, *, downsample, expansion_rate=4, shrinkage_rate=0.25, dropout=0.0 + ): + hidden_dim = int(expansion_rate * dim_out) + stride = 2 if downsample else 1 + + net = nn.Sequential( + nn.Conv2d(dim_in, hidden_dim, 1), + # nn.BatchNorm2d(hidden_dim), + nn.GELU(), + nn.Conv2d( + hidden_dim, hidden_dim, 3, stride=stride, padding=1, groups=hidden_dim + ), + # nn.BatchNorm2d(hidden_dim), + nn.GELU(), + SqueezeExcitation(hidden_dim, shrinkage_rate=shrinkage_rate), + nn.Conv2d(hidden_dim, dim_out, 1), + # nn.BatchNorm2d(dim_out) + ) + + if dim_in == dim_out and not downsample: + net = MBConvResidual(net, dropout=dropout) + + return net + + + # attention related classes + class Attention(nn.Module): + def __init__( + self, + dim, + dim_head=32, + dropout=0.0, + window_size=7, + with_pe=True, + ): + super().__init__() + assert ( + dim % dim_head + ) == 0, "dimension should be divisible by dimension per head" + + self.heads = dim // dim_head + self.scale = dim_head**-0.5 + self.with_pe = with_pe + + self.to_qkv = nn.Linear(dim, dim * 3, bias=False) + + self.attend = nn.Sequential(nn.Softmax(dim=-1), nn.Dropout(dropout)) + + self.to_out = nn.Sequential( + nn.Linear(dim, dim, bias=False), nn.Dropout(dropout) + ) + + # relative positional bias + if self.with_pe: + self.rel_pos_bias = nn.Embedding((2 * window_size - 1) ** 2, self.heads) + + pos = torch.arange(window_size) + grid = torch.stack(torch.meshgrid(pos, pos)) + grid = rearrange(grid, "c i j -> (i j) c") + rel_pos = rearrange(grid, "i ... -> i 1 ...") - rearrange( + grid, "j ... -> 1 j ..." + ) + rel_pos += window_size - 1 + rel_pos_indices = (rel_pos * torch.tensor([2 * window_size - 1, 1])).sum( + dim=-1 + ) + + self.register_buffer("rel_pos_indices", rel_pos_indices, persistent=False) + + def forward(self, x): + batch, height, width, window_height, window_width, _, device, h = ( + *x.shape, + x.device, + self.heads, + ) + + # flatten + + x = rearrange(x, "b x y w1 w2 d -> (b x y) (w1 w2) d") + + # project for queries, keys, values + + q, k, v = self.to_qkv(x).chunk(3, dim=-1) + + # split heads + + q, k, v = map(lambda t: rearrange(t, "b n (h d) -> b h n d", h=h), (q, k, v)) + + # scale + + q = q * self.scale + + # sim + + sim = einsum("b h i d, b h j d -> b h i j", q, k) + + # add positional bias + if self.with_pe: + bias = self.rel_pos_bias(self.rel_pos_indices) + sim = sim + rearrange(bias, "i j h -> h i j") + + # attention + + attn = self.attend(sim) + + # aggregate + + out = einsum("b h i j, b h j d -> b h i d", attn, v) + + # merge heads + + out = rearrange( + out, "b h (w1 w2) d -> b w1 w2 (h d)", w1=window_height, w2=window_width + ) + + # combine heads out + + out = self.to_out(out) + return rearrange(out, "(b x y) ... 
-> b x y ...", x=height, y=width) + + +class Block_Attention(nn.Module): + def __init__( + self, + dim, + dim_head=32, + bias=False, + dropout=0.0, + window_size=7, + with_pe=True, + ): + super().__init__() + assert ( + dim % dim_head + ) == 0, "dimension should be divisible by dimension per head" + + self.heads = dim // dim_head + self.ps = window_size + self.scale = dim_head**-0.5 + self.with_pe = with_pe + + self.qkv = nn.Conv2d(dim, dim * 3, kernel_size=1, bias=bias) + self.qkv_dwconv = nn.Conv2d( + dim * 3, + dim * 3, + kernel_size=3, + stride=1, + padding=1, + groups=dim * 3, + bias=bias, + ) + + self.attend = nn.Sequential(nn.Softmax(dim=-1), nn.Dropout(dropout)) + + self.to_out = nn.Conv2d(dim, dim, kernel_size=1, bias=bias) + + def forward(self, x): + # project for queries, keys, values + b, c, h, w = x.shape + + qkv = self.qkv_dwconv(self.qkv(x)) + q, k, v = qkv.chunk(3, dim=1) + + # split heads + + q, k, v = map( + lambda t: rearrange( + t, + "b (h d) (x w1) (y w2) -> (b x y) h (w1 w2) d", + h=self.heads, + w1=self.ps, + w2=self.ps, + ), + (q, k, v), + ) + + # scale + + q = q * self.scale + + # sim + + sim = einsum("b h i d, b h j d -> b h i j", q, k) + + # attention + attn = self.attend(sim) + + # aggregate + + out = einsum("b h i j, b h j d -> b h i d", attn, v) + + # merge heads + out = rearrange( + out, + "(b x y) head (w1 w2) d -> b (head d) (x w1) (y w2)", + x=h // self.ps, + y=w // self.ps, + head=self.heads, + w1=self.ps, + w2=self.ps, + ) + + out = self.to_out(out) + return out + + +class Channel_Attention(nn.Module): + def __init__(self, dim, heads, bias=False, dropout=0.0, window_size=7): + super(Channel_Attention, self).__init__() + self.heads = heads + + self.temperature = nn.Parameter(torch.ones(heads, 1, 1)) + + self.ps = window_size + + self.qkv = nn.Conv2d(dim, dim * 3, kernel_size=1, bias=bias) + self.qkv_dwconv = nn.Conv2d( + dim * 3, + dim * 3, + kernel_size=3, + stride=1, + padding=1, + groups=dim * 3, + bias=bias, + ) + self.project_out = nn.Conv2d(dim, dim, kernel_size=1, bias=bias) + + def forward(self, x): + b, c, h, w = x.shape + + qkv = self.qkv_dwconv(self.qkv(x)) + qkv = qkv.chunk(3, dim=1) + + q, k, v = map( + lambda t: rearrange( + t, + "b (head d) (h ph) (w pw) -> b (h w) head d (ph pw)", + ph=self.ps, + pw=self.ps, + head=self.heads, + ), + qkv, + ) + + q = F.normalize(q, dim=-1) + k = F.normalize(k, dim=-1) + + attn = (q @ k.transpose(-2, -1)) * self.temperature + attn = attn.softmax(dim=-1) + out = attn @ v + + out = rearrange( + out, + "b (h w) head d (ph pw) -> b (head d) (h ph) (w pw)", + h=h // self.ps, + w=w // self.ps, + ph=self.ps, + pw=self.ps, + head=self.heads, + ) + + out = self.project_out(out) + + return out + + +class Channel_Attention_grid(nn.Module): + def __init__(self, dim, heads, bias=False, dropout=0.0, window_size=7): + super(Channel_Attention_grid, self).__init__() + self.heads = heads + + self.temperature = nn.Parameter(torch.ones(heads, 1, 1)) + + self.ps = window_size + + self.qkv = nn.Conv2d(dim, dim * 3, kernel_size=1, bias=bias) + self.qkv_dwconv = nn.Conv2d( + dim * 3, + dim * 3, + kernel_size=3, + stride=1, + padding=1, + groups=dim * 3, + bias=bias, + ) + self.project_out = nn.Conv2d(dim, dim, kernel_size=1, bias=bias) + + def forward(self, x): + b, c, h, w = x.shape + + qkv = self.qkv_dwconv(self.qkv(x)) + qkv = qkv.chunk(3, dim=1) + + q, k, v = map( + lambda t: rearrange( + t, + "b (head d) (h ph) (w pw) -> b (ph pw) head d (h w)", + ph=self.ps, + pw=self.ps, + head=self.heads, + ), + qkv, + ) + + q = 
F.normalize(q, dim=-1) + k = F.normalize(k, dim=-1) + + attn = (q @ k.transpose(-2, -1)) * self.temperature + attn = attn.softmax(dim=-1) + out = attn @ v + + out = rearrange( + out, + "b (ph pw) head d (h w) -> b (head d) (h ph) (w pw)", + h=h // self.ps, + w=w // self.ps, + ph=self.ps, + pw=self.ps, + head=self.heads, + ) + + out = self.project_out(out) + + return out + + +class OSA_Block(nn.Module): + def __init__( + self, + channel_num=64, + bias=True, + ffn_bias=True, + window_size=8, + with_pe=False, + dropout=0.0, + ): + super(OSA_Block, self).__init__() + + w = window_size + + self.layer = nn.Sequential( + MBConv( + channel_num, + channel_num, + downsample=False, + expansion_rate=1, + shrinkage_rate=0.25, + ), + Rearrange( + "b d (x w1) (y w2) -> b x y w1 w2 d", w1=w, w2=w + ), # block-like attention + PreNormResidual( + channel_num, + Attention( + dim=channel_num, + dim_head=channel_num // 4, + dropout=dropout, + window_size=window_size, + with_pe=with_pe, + ), + ), + Rearrange("b x y w1 w2 d -> b d (x w1) (y w2)"), + Conv_PreNormResidual( + channel_num, Gated_Conv_FeedForward(dim=channel_num, dropout=dropout) + ), + # channel-like attention + Conv_PreNormResidual( + channel_num, + Channel_Attention( + dim=channel_num, heads=4, dropout=dropout, window_size=window_size + ), + ), + Conv_PreNormResidual( + channel_num, Gated_Conv_FeedForward(dim=channel_num, dropout=dropout) + ), + Rearrange( + "b d (w1 x) (w2 y) -> b x y w1 w2 d", w1=w, w2=w + ), # grid-like attention + PreNormResidual( + channel_num, + Attention( + dim=channel_num, + dim_head=channel_num // 4, + dropout=dropout, + window_size=window_size, + with_pe=with_pe, + ), + ), + Rearrange("b x y w1 w2 d -> b d (w1 x) (w2 y)"), + Conv_PreNormResidual( + channel_num, Gated_Conv_FeedForward(dim=channel_num, dropout=dropout) + ), + # channel-like attention + Conv_PreNormResidual( + channel_num, + Channel_Attention_grid( + dim=channel_num, heads=4, dropout=dropout, window_size=window_size + ), + ), + Conv_PreNormResidual( + channel_num, Gated_Conv_FeedForward(dim=channel_num, dropout=dropout) + ), + ) + + def forward(self, x): + out = self.layer(x) + return out diff --git a/ldm_patched/pfn/architecture/OmniSR/OSAG.py b/ldm_patched/pfn/architecture/OmniSR/OSAG.py new file mode 100644 index 0000000000000000000000000000000000000000..477e81f9da4eb1db9b5ec418549d75dd591209ec --- /dev/null +++ b/ldm_patched/pfn/architecture/OmniSR/OSAG.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +############################################################# +# File: OSAG.py +# Created Date: Tuesday April 28th 2022 +# Author: Chen Xuanhong +# Email: chenxuanhongzju@outlook.com +# Last Modified: Sunday, 23rd April 2023 3:08:49 pm +# Modified By: Chen Xuanhong +# Copyright (c) 2020 Shanghai Jiao Tong University +############################################################# + + +import torch.nn as nn + +from .esa import ESA +from .OSA import OSA_Block + + +class OSAG(nn.Module): + def __init__( + self, + channel_num=64, + bias=True, + block_num=4, + ffn_bias=False, + window_size=0, + pe=False, + ): + super(OSAG, self).__init__() + + # print("window_size: %d" % (window_size)) + # print("with_pe", pe) + # print("ffn_bias: %d" % (ffn_bias)) + + # block_script_name = kwargs.get("block_script_name", "OSA") + # block_class_name = kwargs.get("block_class_name", "OSA_Block") + + # script_name = "." 
+ block_script_name + # package = __import__(script_name, fromlist=True) + block_class = OSA_Block # getattr(package, block_class_name) + group_list = [] + for _ in range(block_num): + temp_res = block_class( + channel_num, + bias, + ffn_bias=ffn_bias, + window_size=window_size, + with_pe=pe, + ) + group_list.append(temp_res) + group_list.append(nn.Conv2d(channel_num, channel_num, 1, 1, 0, bias=bias)) + self.residual_layer = nn.Sequential(*group_list) + esa_channel = max(channel_num // 4, 16) + self.esa = ESA(esa_channel, channel_num) + + def forward(self, x): + out = self.residual_layer(x) + out = out + x + return self.esa(out) diff --git a/ldm_patched/pfn/architecture/OmniSR/OmniSR.py b/ldm_patched/pfn/architecture/OmniSR/OmniSR.py new file mode 100644 index 0000000000000000000000000000000000000000..1e1c3f35e657fb972d4209456719a61163831385 --- /dev/null +++ b/ldm_patched/pfn/architecture/OmniSR/OmniSR.py @@ -0,0 +1,143 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +############################################################# +# File: OmniSR.py +# Created Date: Tuesday April 28th 2022 +# Author: Chen Xuanhong +# Email: chenxuanhongzju@outlook.com +# Last Modified: Sunday, 23rd April 2023 3:06:36 pm +# Modified By: Chen Xuanhong +# Copyright (c) 2020 Shanghai Jiao Tong University +############################################################# + +import math + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from .OSAG import OSAG +from .pixelshuffle import pixelshuffle_block + + +class OmniSR(nn.Module): + def __init__( + self, + state_dict, + **kwargs, + ): + super(OmniSR, self).__init__() + self.state = state_dict + + bias = True # Fine to assume this for now + block_num = 1 # Fine to assume this for now + ffn_bias = True + pe = True + + num_feat = state_dict["input.weight"].shape[0] or 64 + num_in_ch = state_dict["input.weight"].shape[1] or 3 + num_out_ch = num_in_ch # we can just assume this for now. 
pixelshuffle smh + + pixelshuffle_shape = state_dict["up.0.weight"].shape[0] + up_scale = math.sqrt(pixelshuffle_shape / num_out_ch) + if up_scale - int(up_scale) > 0: + print( + "out_nc is probably different than in_nc, scale calculation might be wrong" + ) + up_scale = int(up_scale) + res_num = 0 + for key in state_dict.keys(): + if "residual_layer" in key: + temp_res_num = int(key.split(".")[1]) + if temp_res_num > res_num: + res_num = temp_res_num + res_num = res_num + 1 # zero-indexed + + residual_layer = [] + self.res_num = res_num + + if ( + "residual_layer.0.residual_layer.0.layer.2.fn.rel_pos_bias.weight" + in state_dict.keys() + ): + rel_pos_bias_weight = state_dict[ + "residual_layer.0.residual_layer.0.layer.2.fn.rel_pos_bias.weight" + ].shape[0] + self.window_size = int((math.sqrt(rel_pos_bias_weight) + 1) / 2) + else: + self.window_size = 8 + + self.up_scale = up_scale + + for _ in range(res_num): + temp_res = OSAG( + channel_num=num_feat, + bias=bias, + block_num=block_num, + ffn_bias=ffn_bias, + window_size=self.window_size, + pe=pe, + ) + residual_layer.append(temp_res) + self.residual_layer = nn.Sequential(*residual_layer) + self.input = nn.Conv2d( + in_channels=num_in_ch, + out_channels=num_feat, + kernel_size=3, + stride=1, + padding=1, + bias=bias, + ) + self.output = nn.Conv2d( + in_channels=num_feat, + out_channels=num_feat, + kernel_size=3, + stride=1, + padding=1, + bias=bias, + ) + self.up = pixelshuffle_block(num_feat, num_out_ch, up_scale, bias=bias) + + # self.tail = pixelshuffle_block(num_feat,num_out_ch,up_scale,bias=bias) + + # for m in self.modules(): + # if isinstance(m, nn.Conv2d): + # n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + # m.weight.data.normal_(0, sqrt(2. / n)) + + # chaiNNer specific stuff + self.model_arch = "OmniSR" + self.sub_type = "SR" + self.in_nc = num_in_ch + self.out_nc = num_out_ch + self.num_feat = num_feat + self.scale = up_scale + + self.supports_fp16 = True # TODO: Test this + self.supports_bfp16 = True + self.min_size_restriction = 16 + + self.load_state_dict(state_dict, strict=False) + + def check_image_size(self, x): + _, _, h, w = x.size() + # import pdb; pdb.set_trace() + mod_pad_h = (self.window_size - h % self.window_size) % self.window_size + mod_pad_w = (self.window_size - w % self.window_size) % self.window_size + # x = F.pad(x, (0, mod_pad_w, 0, mod_pad_h), 'reflect') + x = F.pad(x, (0, mod_pad_w, 0, mod_pad_h), "constant", 0) + return x + + def forward(self, x): + H, W = x.shape[2:] + x = self.check_image_size(x) + + residual = self.input(x) + out = self.residual_layer(residual) + + # origin + out = torch.add(self.output(out), residual) + out = self.up(out) + + out = out[:, :, : H * self.up_scale, : W * self.up_scale] + return out diff --git a/ldm_patched/pfn/architecture/OmniSR/__pycache__/OSA.cpython-310.pyc b/ldm_patched/pfn/architecture/OmniSR/__pycache__/OSA.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9c94a299ef2bb012069cb5f36f5ec7b96960fd4 Binary files /dev/null and b/ldm_patched/pfn/architecture/OmniSR/__pycache__/OSA.cpython-310.pyc differ diff --git a/ldm_patched/pfn/architecture/OmniSR/__pycache__/OSAG.cpython-310.pyc b/ldm_patched/pfn/architecture/OmniSR/__pycache__/OSAG.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0bd34319c4ef8100a4107df80aa616d92df46a87 Binary files /dev/null and b/ldm_patched/pfn/architecture/OmniSR/__pycache__/OSAG.cpython-310.pyc differ diff --git 
a/ldm_patched/pfn/architecture/OmniSR/__pycache__/OmniSR.cpython-310.pyc b/ldm_patched/pfn/architecture/OmniSR/__pycache__/OmniSR.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..14509f1e89005e5bcec53b54b4d6c9c67f968873 Binary files /dev/null and b/ldm_patched/pfn/architecture/OmniSR/__pycache__/OmniSR.cpython-310.pyc differ diff --git a/ldm_patched/pfn/architecture/OmniSR/__pycache__/esa.cpython-310.pyc b/ldm_patched/pfn/architecture/OmniSR/__pycache__/esa.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1843ab7e423429fb63ae861246f3b36b107e5732 Binary files /dev/null and b/ldm_patched/pfn/architecture/OmniSR/__pycache__/esa.cpython-310.pyc differ diff --git a/ldm_patched/pfn/architecture/OmniSR/__pycache__/layernorm.cpython-310.pyc b/ldm_patched/pfn/architecture/OmniSR/__pycache__/layernorm.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f0c3df2a6e0e11e80e89dc86a3287af8e9bcc817 Binary files /dev/null and b/ldm_patched/pfn/architecture/OmniSR/__pycache__/layernorm.cpython-310.pyc differ diff --git a/ldm_patched/pfn/architecture/OmniSR/__pycache__/pixelshuffle.cpython-310.pyc b/ldm_patched/pfn/architecture/OmniSR/__pycache__/pixelshuffle.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0eafa3a2a6e65070e504924035c7a36f6ad62d28 Binary files /dev/null and b/ldm_patched/pfn/architecture/OmniSR/__pycache__/pixelshuffle.cpython-310.pyc differ diff --git a/ldm_patched/pfn/architecture/OmniSR/esa.py b/ldm_patched/pfn/architecture/OmniSR/esa.py new file mode 100644 index 0000000000000000000000000000000000000000..f9ce7f7a60bfe20b3737eaa2e3110fd460a2d104 --- /dev/null +++ b/ldm_patched/pfn/architecture/OmniSR/esa.py @@ -0,0 +1,294 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +############################################################# +# File: esa.py +# Created Date: Tuesday April 28th 2022 +# Author: Chen Xuanhong +# Email: chenxuanhongzju@outlook.com +# Last Modified: Thursday, 20th April 2023 9:28:06 am +# Modified By: Chen Xuanhong +# Copyright (c) 2020 Shanghai Jiao Tong University +############################################################# + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from .layernorm import LayerNorm2d + + +def moment(x, dim=(2, 3), k=2): + assert len(x.size()) == 4 + mean = torch.mean(x, dim=dim).unsqueeze(-1).unsqueeze(-1) + mk = (1 / (x.size(2) * x.size(3))) * torch.sum(torch.pow(x - mean, k), dim=dim) + return mk + + +class ESA(nn.Module): + """ + Modification of Enhanced Spatial Attention (ESA), which is proposed by + `Residual Feature Aggregation Network for Image Super-Resolution` + Note: `conv_max` and `conv3_` are NOT used here, so the corresponding codes + are deleted. 
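+ + Shape sketch (illustrative doctest with hypothetical sizes; the gating + preserves the input shape): + >>> esa = ESA(esa_channels=16, n_feats=64) + >>> esa(torch.randn(1, 64, 32, 32)).shape + torch.Size([1, 64, 32, 32])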
+ """ + + def __init__(self, esa_channels, n_feats, conv=nn.Conv2d): + super(ESA, self).__init__() + f = esa_channels + self.conv1 = conv(n_feats, f, kernel_size=1) + self.conv_f = conv(f, f, kernel_size=1) + self.conv2 = conv(f, f, kernel_size=3, stride=2, padding=0) + self.conv3 = conv(f, f, kernel_size=3, padding=1) + self.conv4 = conv(f, n_feats, kernel_size=1) + self.sigmoid = nn.Sigmoid() + self.relu = nn.ReLU(inplace=True) + + def forward(self, x): + c1_ = self.conv1(x) + c1 = self.conv2(c1_) + v_max = F.max_pool2d(c1, kernel_size=7, stride=3) + c3 = self.conv3(v_max) + c3 = F.interpolate( + c3, (x.size(2), x.size(3)), mode="bilinear", align_corners=False + ) + cf = self.conv_f(c1_) + c4 = self.conv4(c3 + cf) + m = self.sigmoid(c4) + return x * m + + +class LK_ESA(nn.Module): + def __init__( + self, esa_channels, n_feats, conv=nn.Conv2d, kernel_expand=1, bias=True + ): + super(LK_ESA, self).__init__() + f = esa_channels + self.conv1 = conv(n_feats, f, kernel_size=1) + self.conv_f = conv(f, f, kernel_size=1) + + kernel_size = 17 + kernel_expand = kernel_expand + padding = kernel_size // 2 + + self.vec_conv = nn.Conv2d( + in_channels=f * kernel_expand, + out_channels=f * kernel_expand, + kernel_size=(1, kernel_size), + padding=(0, padding), + groups=2, + bias=bias, + ) + self.vec_conv3x1 = nn.Conv2d( + in_channels=f * kernel_expand, + out_channels=f * kernel_expand, + kernel_size=(1, 3), + padding=(0, 1), + groups=2, + bias=bias, + ) + + self.hor_conv = nn.Conv2d( + in_channels=f * kernel_expand, + out_channels=f * kernel_expand, + kernel_size=(kernel_size, 1), + padding=(padding, 0), + groups=2, + bias=bias, + ) + self.hor_conv1x3 = nn.Conv2d( + in_channels=f * kernel_expand, + out_channels=f * kernel_expand, + kernel_size=(3, 1), + padding=(1, 0), + groups=2, + bias=bias, + ) + + self.conv4 = conv(f, n_feats, kernel_size=1) + self.sigmoid = nn.Sigmoid() + self.relu = nn.ReLU(inplace=True) + + def forward(self, x): + c1_ = self.conv1(x) + + res = self.vec_conv(c1_) + self.vec_conv3x1(c1_) + res = self.hor_conv(res) + self.hor_conv1x3(res) + + cf = self.conv_f(c1_) + c4 = self.conv4(res + cf) + m = self.sigmoid(c4) + return x * m + + +class LK_ESA_LN(nn.Module): + def __init__( + self, esa_channels, n_feats, conv=nn.Conv2d, kernel_expand=1, bias=True + ): + super(LK_ESA_LN, self).__init__() + f = esa_channels + self.conv1 = conv(n_feats, f, kernel_size=1) + self.conv_f = conv(f, f, kernel_size=1) + + kernel_size = 17 + kernel_expand = kernel_expand + padding = kernel_size // 2 + + self.norm = LayerNorm2d(n_feats) + + self.vec_conv = nn.Conv2d( + in_channels=f * kernel_expand, + out_channels=f * kernel_expand, + kernel_size=(1, kernel_size), + padding=(0, padding), + groups=2, + bias=bias, + ) + self.vec_conv3x1 = nn.Conv2d( + in_channels=f * kernel_expand, + out_channels=f * kernel_expand, + kernel_size=(1, 3), + padding=(0, 1), + groups=2, + bias=bias, + ) + + self.hor_conv = nn.Conv2d( + in_channels=f * kernel_expand, + out_channels=f * kernel_expand, + kernel_size=(kernel_size, 1), + padding=(padding, 0), + groups=2, + bias=bias, + ) + self.hor_conv1x3 = nn.Conv2d( + in_channels=f * kernel_expand, + out_channels=f * kernel_expand, + kernel_size=(3, 1), + padding=(1, 0), + groups=2, + bias=bias, + ) + + self.conv4 = conv(f, n_feats, kernel_size=1) + self.sigmoid = nn.Sigmoid() + self.relu = nn.ReLU(inplace=True) + + def forward(self, x): + c1_ = self.norm(x) + c1_ = self.conv1(c1_) + + res = self.vec_conv(c1_) + self.vec_conv3x1(c1_) + res = self.hor_conv(res) + 
self.hor_conv1x3(res) + + cf = self.conv_f(c1_) + c4 = self.conv4(res + cf) + m = self.sigmoid(c4) + return x * m + + +class AdaGuidedFilter(nn.Module): + def __init__( + self, esa_channels, n_feats, conv=nn.Conv2d, kernel_expand=1, bias=True + ): + super(AdaGuidedFilter, self).__init__() + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Conv2d( + in_channels=n_feats, + out_channels=1, + kernel_size=1, + padding=0, + stride=1, + groups=1, + bias=True, + ) + + self.r = 5 + + def box_filter(self, x, r): + channel = x.shape[1] + kernel_size = 2 * r + 1 + weight = 1.0 / (kernel_size**2) + box_kernel = weight * torch.ones( + (channel, 1, kernel_size, kernel_size), dtype=torch.float32, device=x.device + ) + output = F.conv2d(x, weight=box_kernel, stride=1, padding=r, groups=channel) + return output + + def forward(self, x): + _, _, H, W = x.shape + N = self.box_filter( + torch.ones((1, 1, H, W), dtype=x.dtype, device=x.device), self.r + ) + + # epsilon = self.fc(self.gap(x)) + # epsilon = torch.pow(epsilon, 2) + epsilon = 1e-2 + + mean_x = self.box_filter(x, self.r) / N + var_x = self.box_filter(x * x, self.r) / N - mean_x * mean_x + + A = var_x / (var_x + epsilon) + b = (1 - A) * mean_x + m = A * x + b + + # mean_A = self.box_filter(A, self.r) / N + # mean_b = self.box_filter(b, self.r) / N + # m = mean_A * x + mean_b + return x * m + + +class AdaConvGuidedFilter(nn.Module): + def __init__( + self, esa_channels, n_feats, conv=nn.Conv2d, kernel_expand=1, bias=True + ): + super(AdaConvGuidedFilter, self).__init__() + f = esa_channels + + self.conv_f = conv(f, f, kernel_size=1) + + kernel_size = 17 + kernel_expand = kernel_expand + padding = kernel_size // 2 + + self.vec_conv = nn.Conv2d( + in_channels=f, + out_channels=f, + kernel_size=(1, kernel_size), + padding=(0, padding), + groups=f, + bias=bias, + ) + + self.hor_conv = nn.Conv2d( + in_channels=f, + out_channels=f, + kernel_size=(kernel_size, 1), + padding=(padding, 0), + groups=f, + bias=bias, + ) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Conv2d( + in_channels=f, + out_channels=f, + kernel_size=1, + padding=0, + stride=1, + groups=1, + bias=True, + ) + + def forward(self, x): + y = self.vec_conv(x) + y = self.hor_conv(y) + + sigma = torch.pow(y, 2) + epsilon = self.fc(self.gap(y)) + + weight = sigma / (sigma + epsilon) + + m = weight * x + (1 - weight) + + return x * m diff --git a/ldm_patched/pfn/architecture/OmniSR/layernorm.py b/ldm_patched/pfn/architecture/OmniSR/layernorm.py new file mode 100644 index 0000000000000000000000000000000000000000..731a25f7542d45757a284648055d7c6ffad4c3fd --- /dev/null +++ b/ldm_patched/pfn/architecture/OmniSR/layernorm.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +############################################################# +# File: layernorm.py +# Created Date: Tuesday April 28th 2022 +# Author: Chen Xuanhong +# Email: chenxuanhongzju@outlook.com +# Last Modified: Thursday, 20th April 2023 9:28:20 am +# Modified By: Chen Xuanhong +# Copyright (c) 2020 Shanghai Jiao Tong University +############################################################# + +import torch +import torch.nn as nn + + +class LayerNormFunction(torch.autograd.Function): + @staticmethod + def forward(ctx, x, weight, bias, eps): + ctx.eps = eps + N, C, H, W = x.size() + mu = x.mean(1, keepdim=True) + var = (x - mu).pow(2).mean(1, keepdim=True) + y = (x - mu) / (var + eps).sqrt() + ctx.save_for_backward(y, var, weight) + y = weight.view(1, C, 1, 1) * y + bias.view(1, C, 1, 1) + return y + + @staticmethod 
+ def backward(ctx, grad_output): + eps = ctx.eps + + N, C, H, W = grad_output.size() + y, var, weight = ctx.saved_variables + g = grad_output * weight.view(1, C, 1, 1) + mean_g = g.mean(dim=1, keepdim=True) + + mean_gy = (g * y).mean(dim=1, keepdim=True) + gx = 1.0 / torch.sqrt(var + eps) * (g - y * mean_gy - mean_g) + return ( + gx, + (grad_output * y).sum(dim=3).sum(dim=2).sum(dim=0), + grad_output.sum(dim=3).sum(dim=2).sum(dim=0), + None, + ) + + +class LayerNorm2d(nn.Module): + def __init__(self, channels, eps=1e-6): + super(LayerNorm2d, self).__init__() + self.register_parameter("weight", nn.Parameter(torch.ones(channels))) + self.register_parameter("bias", nn.Parameter(torch.zeros(channels))) + self.eps = eps + + def forward(self, x): + return LayerNormFunction.apply(x, self.weight, self.bias, self.eps) + + +class GRN(nn.Module): + """GRN (Global Response Normalization) layer""" + + def __init__(self, dim): + super().__init__() + self.gamma = nn.Parameter(torch.zeros(1, dim, 1, 1)) + self.beta = nn.Parameter(torch.zeros(1, dim, 1, 1)) + + def forward(self, x): + Gx = torch.norm(x, p=2, dim=(2, 3), keepdim=True) + Nx = Gx / (Gx.mean(dim=1, keepdim=True) + 1e-6) + return self.gamma * (x * Nx) + self.beta + x diff --git a/ldm_patched/pfn/architecture/OmniSR/pixelshuffle.py b/ldm_patched/pfn/architecture/OmniSR/pixelshuffle.py new file mode 100644 index 0000000000000000000000000000000000000000..4260fb7c9d8d912e34899ce7877595b617f9bb02 --- /dev/null +++ b/ldm_patched/pfn/architecture/OmniSR/pixelshuffle.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +############################################################# +# File: pixelshuffle.py +# Created Date: Friday July 1st 2022 +# Author: Chen Xuanhong +# Email: chenxuanhongzju@outlook.com +# Last Modified: Friday, 1st July 2022 10:18:39 am +# Modified By: Chen Xuanhong +# Copyright (c) 2022 Shanghai Jiao Tong University +############################################################# + +import torch.nn as nn + + +def pixelshuffle_block( + in_channels, out_channels, upscale_factor=2, kernel_size=3, bias=False +): + """ + Upsample features according to `upscale_factor`. + """ + padding = kernel_size // 2 + conv = nn.Conv2d( + in_channels, + out_channels * (upscale_factor**2), + kernel_size, + padding=1, + bias=bias, + ) + pixel_shuffle = nn.PixelShuffle(upscale_factor) + return nn.Sequential(*[conv, pixel_shuffle]) diff --git a/ldm_patched/pfn/architecture/RRDB.py b/ldm_patched/pfn/architecture/RRDB.py new file mode 100644 index 0000000000000000000000000000000000000000..8d318b90b865fecd0b88adc6daf2c6d2e29860a3 --- /dev/null +++ b/ldm_patched/pfn/architecture/RRDB.py @@ -0,0 +1,296 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import functools +import math +import re +from collections import OrderedDict + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from . import block as B + + +# Borrowed from https://github.com/rlaphoenix/VSGAN/blob/master/vsgan/archs/esrgan.py +# Which enhanced stuff that was already here +class RRDBNet(nn.Module): + def __init__( + self, + state_dict, + norm=None, + act: str = "leakyrelu", + upsampler: str = "upconv", + mode: B.ConvMode = "CNA", + ) -> None: + """ + ESRGAN - Enhanced Super-Resolution Generative Adversarial Networks. + By Xintao Wang, Ke Yu, Shixiang Wu, Jinjin Gu, Yihao Liu, Chao Dong, Yu Qiao, + and Chen Change Loy. 
+ This is old-arch Residual in Residual Dense Block Network and is not + the newest revision that's available at github.com/xinntao/ESRGAN. + This is on purpose, the newest Network has severely limited the + potential use of the Network with no benefits. + This network supports model files from both new and old-arch. + Args: + norm: Normalization layer + act: Activation layer + upsampler: Upsample layer. upconv, pixel_shuffle + mode: Convolution mode + """ + super(RRDBNet, self).__init__() + self.model_arch = "ESRGAN" + self.sub_type = "SR" + + self.state = state_dict + self.norm = norm + self.act = act + self.upsampler = upsampler + self.mode = mode + + self.state_map = { + # currently supports old, new, and newer RRDBNet arch models + # ESRGAN, BSRGAN/RealSR, Real-ESRGAN + "model.0.weight": ("conv_first.weight",), + "model.0.bias": ("conv_first.bias",), + "model.1.sub./NB/.weight": ("trunk_conv.weight", "conv_body.weight"), + "model.1.sub./NB/.bias": ("trunk_conv.bias", "conv_body.bias"), + r"model.1.sub.\1.RDB\2.conv\3.0.\4": ( + r"RRDB_trunk\.(\d+)\.RDB(\d)\.conv(\d+)\.(weight|bias)", + r"body\.(\d+)\.rdb(\d)\.conv(\d+)\.(weight|bias)", + ), + } + if "params_ema" in self.state: + self.state = self.state["params_ema"] + # self.model_arch = "RealESRGAN" + self.num_blocks = self.get_num_blocks() + self.plus = any("conv1x1" in k for k in self.state.keys()) + if self.plus: + self.model_arch = "ESRGAN+" + + self.state = self.new_to_old_arch(self.state) + + self.key_arr = list(self.state.keys()) + + self.in_nc: int = self.state[self.key_arr[0]].shape[1] + self.out_nc: int = self.state[self.key_arr[-1]].shape[0] + + self.scale: int = self.get_scale() + self.num_filters: int = self.state[self.key_arr[0]].shape[0] + + c2x2 = False + if self.state["model.0.weight"].shape[-2] == 2: + c2x2 = True + self.scale = round(math.sqrt(self.scale / 4)) + self.model_arch = "ESRGAN-2c2" + + self.supports_fp16 = True + self.supports_bfp16 = True + self.min_size_restriction = None + + # Detect if pixelunshuffle was used (Real-ESRGAN) + if self.in_nc in (self.out_nc * 4, self.out_nc * 16) and self.out_nc in ( + self.in_nc / 4, + self.in_nc / 16, + ): + self.shuffle_factor = int(math.sqrt(self.in_nc / self.out_nc)) + else: + self.shuffle_factor = None + + upsample_block = { + "upconv": B.upconv_block, + "pixel_shuffle": B.pixelshuffle_block, + }.get(self.upsampler) + if upsample_block is None: + raise NotImplementedError(f"Upsample mode [{self.upsampler}] is not found") + + if self.scale == 3: + upsample_blocks = upsample_block( + in_nc=self.num_filters, + out_nc=self.num_filters, + upscale_factor=3, + act_type=self.act, + c2x2=c2x2, + ) + else: + upsample_blocks = [ + upsample_block( + in_nc=self.num_filters, + out_nc=self.num_filters, + act_type=self.act, + c2x2=c2x2, + ) + for _ in range(int(math.log(self.scale, 2))) + ] + + self.model = B.sequential( + # fea conv + B.conv_block( + in_nc=self.in_nc, + out_nc=self.num_filters, + kernel_size=3, + norm_type=None, + act_type=None, + c2x2=c2x2, + ), + B.ShortcutBlock( + B.sequential( + # rrdb blocks + *[ + B.RRDB( + nf=self.num_filters, + kernel_size=3, + gc=32, + stride=1, + bias=True, + pad_type="zero", + norm_type=self.norm, + act_type=self.act, + mode="CNA", + plus=self.plus, + c2x2=c2x2, + ) + for _ in range(self.num_blocks) + ], + # lr conv + B.conv_block( + in_nc=self.num_filters, + out_nc=self.num_filters, + kernel_size=3, + norm_type=self.norm, + act_type=None, + mode=self.mode, + c2x2=c2x2, + ), + ) + ), + *upsample_blocks, + # hr_conv0 + B.conv_block( + 
in_nc=self.num_filters, + out_nc=self.num_filters, + kernel_size=3, + norm_type=None, + act_type=self.act, + c2x2=c2x2, + ), + # hr_conv1 + B.conv_block( + in_nc=self.num_filters, + out_nc=self.out_nc, + kernel_size=3, + norm_type=None, + act_type=None, + c2x2=c2x2, + ), + ) + + # Adjust these properties for calculations outside of the model + if self.shuffle_factor: + self.in_nc //= self.shuffle_factor**2 + self.scale //= self.shuffle_factor + + self.load_state_dict(self.state, strict=False) + + def new_to_old_arch(self, state): + """Convert a new-arch model state dictionary to an old-arch dictionary.""" + if "params_ema" in state: + state = state["params_ema"] + + if "conv_first.weight" not in state: + # model is already old arch, this is a loose check, but should be sufficient + return state + + # add nb to state keys + for kind in ("weight", "bias"): + self.state_map[f"model.1.sub.{self.num_blocks}.{kind}"] = self.state_map[ + f"model.1.sub./NB/.{kind}" + ] + del self.state_map[f"model.1.sub./NB/.{kind}"] + + old_state = OrderedDict() + for old_key, new_keys in self.state_map.items(): + for new_key in new_keys: + if r"\1" in old_key: + for k, v in state.items(): + sub = re.sub(new_key, old_key, k) + if sub != k: + old_state[sub] = v + else: + if new_key in state: + old_state[old_key] = state[new_key] + + # upconv layers + max_upconv = 0 + for key in state.keys(): + match = re.match(r"(upconv|conv_up)(\d)\.(weight|bias)", key) + if match is not None: + _, key_num, key_type = match.groups() + old_state[f"model.{int(key_num) * 3}.{key_type}"] = state[key] + max_upconv = max(max_upconv, int(key_num) * 3) + + # final layers + for key in state.keys(): + if key in ("HRconv.weight", "conv_hr.weight"): + old_state[f"model.{max_upconv + 2}.weight"] = state[key] + elif key in ("HRconv.bias", "conv_hr.bias"): + old_state[f"model.{max_upconv + 2}.bias"] = state[key] + elif key in ("conv_last.weight",): + old_state[f"model.{max_upconv + 4}.weight"] = state[key] + elif key in ("conv_last.bias",): + old_state[f"model.{max_upconv + 4}.bias"] = state[key] + + # Sort by first numeric value of each layer + def compare(item1, item2): + parts1 = item1.split(".") + parts2 = item2.split(".") + int1 = int(parts1[1]) + int2 = int(parts2[1]) + return int1 - int2 + + sorted_keys = sorted(old_state.keys(), key=functools.cmp_to_key(compare)) + + # Rebuild the output dict in the right order + out_dict = OrderedDict((k, old_state[k]) for k in sorted_keys) + + return out_dict + + def get_scale(self, min_part: int = 6) -> int: + n = 0 + for part in list(self.state): + parts = part.split(".")[1:] + if len(parts) == 2: + part_num = int(parts[0]) + if part_num > min_part and parts[1] == "weight": + n += 1 + return 2**n + + def get_num_blocks(self) -> int: + nbs = [] + state_keys = self.state_map[r"model.1.sub.\1.RDB\2.conv\3.0.\4"] + ( + r"model\.\d+\.sub\.(\d+)\.RDB(\d+)\.conv(\d+)\.0\.(weight|bias)", + ) + for state_key in state_keys: + for k in self.state: + m = re.search(state_key, k) + if m: + nbs.append(int(m.group(1))) + if nbs: + break + return max(*nbs) + 1 + + def forward(self, x): + if self.shuffle_factor: + _, _, h, w = x.size() + mod_pad_h = ( + self.shuffle_factor - h % self.shuffle_factor + ) % self.shuffle_factor + mod_pad_w = ( + self.shuffle_factor - w % self.shuffle_factor + ) % self.shuffle_factor + x = F.pad(x, (0, mod_pad_w, 0, mod_pad_h), "reflect") + x = torch.pixel_unshuffle(x, downscale_factor=self.shuffle_factor) + x = self.model(x) + return x[:, :, : h * self.scale, : w * self.scale] + return 
self.model(x) diff --git a/ldm_patched/pfn/architecture/SCUNet.py b/ldm_patched/pfn/architecture/SCUNet.py new file mode 100644 index 0000000000000000000000000000000000000000..b8354a873085140e9ff7d582c43ba9818ed9524e --- /dev/null +++ b/ldm_patched/pfn/architecture/SCUNet.py @@ -0,0 +1,455 @@ +# pylint: skip-file +# ----------------------------------------------------------------------------------- +# SCUNet: Practical Blind Denoising via Swin-Conv-UNet and Data Synthesis, https://arxiv.org/abs/2203.13278 +# Zhang, Kai and Li, Yawei and Liang, Jingyun and Cao, Jiezhang and Zhang, Yulun and Tang, Hao and Timofte, Radu and Van Gool, Luc +# ----------------------------------------------------------------------------------- + +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from einops import rearrange +from einops.layers.torch import Rearrange + +from .timm.drop import DropPath +from .timm.weight_init import trunc_normal_ + + +# Borrowed from https://github.com/cszn/SCUNet/blob/main/models/network_scunet.py +class WMSA(nn.Module): + """Self-attention module in Swin Transformer""" + + def __init__(self, input_dim, output_dim, head_dim, window_size, type): + super(WMSA, self).__init__() + self.input_dim = input_dim + self.output_dim = output_dim + self.head_dim = head_dim + self.scale = self.head_dim**-0.5 + self.n_heads = input_dim // head_dim + self.window_size = window_size + self.type = type + self.embedding_layer = nn.Linear(self.input_dim, 3 * self.input_dim, bias=True) + + self.relative_position_params = nn.Parameter( + torch.zeros((2 * window_size - 1) * (2 * window_size - 1), self.n_heads) + ) + # TODO recover + # self.relative_position_params = nn.Parameter(torch.zeros(self.n_heads, 2 * window_size - 1, 2 * window_size -1)) + self.relative_position_params = nn.Parameter( + torch.zeros((2 * window_size - 1) * (2 * window_size - 1), self.n_heads) + ) + + self.linear = nn.Linear(self.input_dim, self.output_dim) + + trunc_normal_(self.relative_position_params, std=0.02) + self.relative_position_params = torch.nn.Parameter( + self.relative_position_params.view( + 2 * window_size - 1, 2 * window_size - 1, self.n_heads + ) + .transpose(1, 2) + .transpose(0, 1) + ) + + def generate_mask(self, h, w, p, shift): + """generating the mask of SW-MSA + Args: + shift: shift parameters in CyclicShift. + Returns: + attn_mask: should be (1 1 w p p), + """ + # supporting square. + attn_mask = torch.zeros( + h, + w, + p, + p, + p, + p, + dtype=torch.bool, + device=self.relative_position_params.device, + ) + if self.type == "W": + return attn_mask + + s = p - shift + attn_mask[-1, :, :s, :, s:, :] = True + attn_mask[-1, :, s:, :, :s, :] = True + attn_mask[:, -1, :, :s, :, s:] = True + attn_mask[:, -1, :, s:, :, :s] = True + attn_mask = rearrange( + attn_mask, "w1 w2 p1 p2 p3 p4 -> 1 1 (w1 w2) (p1 p2) (p3 p4)" + ) + return attn_mask + + def forward(self, x): + """Forward pass of Window Multi-head Self-attention module. 
+ Args: + x: input tensor with shape of [b h w c]; + attn_mask: attention mask, fill -inf where the value is True; + Returns: + output: tensor shape [b h w c] + """ + if self.type != "W": + x = torch.roll( + x, + shifts=(-(self.window_size // 2), -(self.window_size // 2)), + dims=(1, 2), + ) + + x = rearrange( + x, + "b (w1 p1) (w2 p2) c -> b w1 w2 p1 p2 c", + p1=self.window_size, + p2=self.window_size, + ) + h_windows = x.size(1) + w_windows = x.size(2) + # square validation + # assert h_windows == w_windows + + x = rearrange( + x, + "b w1 w2 p1 p2 c -> b (w1 w2) (p1 p2) c", + p1=self.window_size, + p2=self.window_size, + ) + qkv = self.embedding_layer(x) + q, k, v = rearrange( + qkv, "b nw np (threeh c) -> threeh b nw np c", c=self.head_dim + ).chunk(3, dim=0) + sim = torch.einsum("hbwpc,hbwqc->hbwpq", q, k) * self.scale + # Adding learnable relative embedding + sim = sim + rearrange(self.relative_embedding(), "h p q -> h 1 1 p q") + # Using Attn Mask to distinguish different subwindows. + if self.type != "W": + attn_mask = self.generate_mask( + h_windows, w_windows, self.window_size, shift=self.window_size // 2 + ) + sim = sim.masked_fill_(attn_mask, float("-inf")) + + probs = nn.functional.softmax(sim, dim=-1) + output = torch.einsum("hbwij,hbwjc->hbwic", probs, v) + output = rearrange(output, "h b w p c -> b w p (h c)") + output = self.linear(output) + output = rearrange( + output, + "b (w1 w2) (p1 p2) c -> b (w1 p1) (w2 p2) c", + w1=h_windows, + p1=self.window_size, + ) + + if self.type != "W": + output = torch.roll( + output, + shifts=(self.window_size // 2, self.window_size // 2), + dims=(1, 2), + ) + + return output + + def relative_embedding(self): + cord = torch.tensor( + np.array( + [ + [i, j] + for i in range(self.window_size) + for j in range(self.window_size) + ] + ) + ) + relation = cord[:, None, :] - cord[None, :, :] + self.window_size - 1 + # negative is allowed + return self.relative_position_params[ + :, relation[:, :, 0].long(), relation[:, :, 1].long() + ] + + +class Block(nn.Module): + def __init__( + self, + input_dim, + output_dim, + head_dim, + window_size, + drop_path, + type="W", + input_resolution=None, + ): + """SwinTransformer Block""" + super(Block, self).__init__() + self.input_dim = input_dim + self.output_dim = output_dim + assert type in ["W", "SW"] + self.type = type + if input_resolution <= window_size: + self.type = "W" + + self.ln1 = nn.LayerNorm(input_dim) + self.msa = WMSA(input_dim, input_dim, head_dim, window_size, self.type) + self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity() + self.ln2 = nn.LayerNorm(input_dim) + self.mlp = nn.Sequential( + nn.Linear(input_dim, 4 * input_dim), + nn.GELU(), + nn.Linear(4 * input_dim, output_dim), + ) + + def forward(self, x): + x = x + self.drop_path(self.msa(self.ln1(x))) + x = x + self.drop_path(self.mlp(self.ln2(x))) + return x + + +class ConvTransBlock(nn.Module): + def __init__( + self, + conv_dim, + trans_dim, + head_dim, + window_size, + drop_path, + type="W", + input_resolution=None, + ): + """SwinTransformer and Conv Block""" + super(ConvTransBlock, self).__init__() + self.conv_dim = conv_dim + self.trans_dim = trans_dim + self.head_dim = head_dim + self.window_size = window_size + self.drop_path = drop_path + self.type = type + self.input_resolution = input_resolution + + assert self.type in ["W", "SW"] + if self.input_resolution <= self.window_size: + self.type = "W" + + self.trans_block = Block( + self.trans_dim, + self.trans_dim, + self.head_dim, + self.window_size, + 
self.drop_path, + self.type, + self.input_resolution, + ) + self.conv1_1 = nn.Conv2d( + self.conv_dim + self.trans_dim, + self.conv_dim + self.trans_dim, + 1, + 1, + 0, + bias=True, + ) + self.conv1_2 = nn.Conv2d( + self.conv_dim + self.trans_dim, + self.conv_dim + self.trans_dim, + 1, + 1, + 0, + bias=True, + ) + + self.conv_block = nn.Sequential( + nn.Conv2d(self.conv_dim, self.conv_dim, 3, 1, 1, bias=False), + nn.ReLU(True), + nn.Conv2d(self.conv_dim, self.conv_dim, 3, 1, 1, bias=False), + ) + + def forward(self, x): + conv_x, trans_x = torch.split( + self.conv1_1(x), (self.conv_dim, self.trans_dim), dim=1 + ) + conv_x = self.conv_block(conv_x) + conv_x + trans_x = Rearrange("b c h w -> b h w c")(trans_x) + trans_x = self.trans_block(trans_x) + trans_x = Rearrange("b h w c -> b c h w")(trans_x) + res = self.conv1_2(torch.cat((conv_x, trans_x), dim=1)) + x = x + res + + return x + + +class SCUNet(nn.Module): + def __init__( + self, + state_dict, + in_nc=3, + config=[4, 4, 4, 4, 4, 4, 4], + dim=64, + drop_path_rate=0.0, + input_resolution=256, + ): + super(SCUNet, self).__init__() + self.model_arch = "SCUNet" + self.sub_type = "SR" + + self.num_filters: int = 0 + + self.state = state_dict + self.config = config + self.dim = dim + self.head_dim = 32 + self.window_size = 8 + + self.in_nc = in_nc + self.out_nc = self.in_nc + self.scale = 1 + self.supports_fp16 = True + + # drop path rate for each layer + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, sum(config))] + + self.m_head = [nn.Conv2d(in_nc, dim, 3, 1, 1, bias=False)] + + begin = 0 + self.m_down1 = [ + ConvTransBlock( + dim // 2, + dim // 2, + self.head_dim, + self.window_size, + dpr[i + begin], + "W" if not i % 2 else "SW", + input_resolution, + ) + for i in range(config[0]) + ] + [nn.Conv2d(dim, 2 * dim, 2, 2, 0, bias=False)] + + begin += config[0] + self.m_down2 = [ + ConvTransBlock( + dim, + dim, + self.head_dim, + self.window_size, + dpr[i + begin], + "W" if not i % 2 else "SW", + input_resolution // 2, + ) + for i in range(config[1]) + ] + [nn.Conv2d(2 * dim, 4 * dim, 2, 2, 0, bias=False)] + + begin += config[1] + self.m_down3 = [ + ConvTransBlock( + 2 * dim, + 2 * dim, + self.head_dim, + self.window_size, + dpr[i + begin], + "W" if not i % 2 else "SW", + input_resolution // 4, + ) + for i in range(config[2]) + ] + [nn.Conv2d(4 * dim, 8 * dim, 2, 2, 0, bias=False)] + + begin += config[2] + self.m_body = [ + ConvTransBlock( + 4 * dim, + 4 * dim, + self.head_dim, + self.window_size, + dpr[i + begin], + "W" if not i % 2 else "SW", + input_resolution // 8, + ) + for i in range(config[3]) + ] + + begin += config[3] + self.m_up3 = [ + nn.ConvTranspose2d(8 * dim, 4 * dim, 2, 2, 0, bias=False), + ] + [ + ConvTransBlock( + 2 * dim, + 2 * dim, + self.head_dim, + self.window_size, + dpr[i + begin], + "W" if not i % 2 else "SW", + input_resolution // 4, + ) + for i in range(config[4]) + ] + + begin += config[4] + self.m_up2 = [ + nn.ConvTranspose2d(4 * dim, 2 * dim, 2, 2, 0, bias=False), + ] + [ + ConvTransBlock( + dim, + dim, + self.head_dim, + self.window_size, + dpr[i + begin], + "W" if not i % 2 else "SW", + input_resolution // 2, + ) + for i in range(config[5]) + ] + + begin += config[5] + self.m_up1 = [ + nn.ConvTranspose2d(2 * dim, dim, 2, 2, 0, bias=False), + ] + [ + ConvTransBlock( + dim // 2, + dim // 2, + self.head_dim, + self.window_size, + dpr[i + begin], + "W" if not i % 2 else "SW", + input_resolution, + ) + for i in range(config[6]) + ] + + self.m_tail = [nn.Conv2d(dim, in_nc, 3, 1, 1, bias=False)] + + 
self.m_head = nn.Sequential(*self.m_head) + self.m_down1 = nn.Sequential(*self.m_down1) + self.m_down2 = nn.Sequential(*self.m_down2) + self.m_down3 = nn.Sequential(*self.m_down3) + self.m_body = nn.Sequential(*self.m_body) + self.m_up3 = nn.Sequential(*self.m_up3) + self.m_up2 = nn.Sequential(*self.m_up2) + self.m_up1 = nn.Sequential(*self.m_up1) + self.m_tail = nn.Sequential(*self.m_tail) + # self.apply(self._init_weights) + self.load_state_dict(state_dict, strict=True) + + def check_image_size(self, x): + _, _, h, w = x.size() + mod_pad_h = (64 - h % 64) % 64 + mod_pad_w = (64 - w % 64) % 64 + x = F.pad(x, (0, mod_pad_w, 0, mod_pad_h), "reflect") + return x + + def forward(self, x0): + h, w = x0.size()[-2:] + x0 = self.check_image_size(x0) + + x1 = self.m_head(x0) + x2 = self.m_down1(x1) + x3 = self.m_down2(x2) + x4 = self.m_down3(x3) + x = self.m_body(x4) + x = self.m_up3(x + x4) + x = self.m_up2(x + x3) + x = self.m_up1(x + x2) + x = self.m_tail(x + x1) + + x = x[:, :, :h, :w] + return x + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=0.02) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) diff --git a/ldm_patched/pfn/architecture/SPSR.py b/ldm_patched/pfn/architecture/SPSR.py new file mode 100644 index 0000000000000000000000000000000000000000..c3cefff190292a63cf61fe3fa9c28131dac4f369 --- /dev/null +++ b/ldm_patched/pfn/architecture/SPSR.py @@ -0,0 +1,383 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import math + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from . import block as B + + +class Get_gradient_nopadding(nn.Module): + def __init__(self): + super(Get_gradient_nopadding, self).__init__() + kernel_v = [[0, -1, 0], [0, 0, 0], [0, 1, 0]] + kernel_h = [[0, 0, 0], [-1, 0, 1], [0, 0, 0]] + kernel_h = torch.FloatTensor(kernel_h).unsqueeze(0).unsqueeze(0) + kernel_v = torch.FloatTensor(kernel_v).unsqueeze(0).unsqueeze(0) + self.weight_h = nn.Parameter(data=kernel_h, requires_grad=False) # type: ignore + + self.weight_v = nn.Parameter(data=kernel_v, requires_grad=False) # type: ignore + + def forward(self, x): + x_list = [] + for i in range(x.shape[1]): + x_i = x[:, i] + x_i_v = F.conv2d(x_i.unsqueeze(1), self.weight_v, padding=1) + x_i_h = F.conv2d(x_i.unsqueeze(1), self.weight_h, padding=1) + x_i = torch.sqrt(torch.pow(x_i_v, 2) + torch.pow(x_i_h, 2) + 1e-6) + x_list.append(x_i) + + x = torch.cat(x_list, dim=1) + + return x + + +class SPSRNet(nn.Module): + def __init__( + self, + state_dict, + norm=None, + act: str = "leakyrelu", + upsampler: str = "upconv", + mode: B.ConvMode = "CNA", + ): + super(SPSRNet, self).__init__() + self.model_arch = "SPSR" + self.sub_type = "SR" + + self.state = state_dict + self.norm = norm + self.act = act + self.upsampler = upsampler + self.mode = mode + + self.num_blocks = self.get_num_blocks() + + self.in_nc: int = self.state["model.0.weight"].shape[1] + self.out_nc: int = self.state["f_HR_conv1.0.bias"].shape[0] + + self.scale = self.get_scale(4) + self.num_filters: int = self.state["model.0.weight"].shape[0] + + self.supports_fp16 = True + self.supports_bfp16 = True + self.min_size_restriction = None + + n_upscale = int(math.log(self.scale, 2)) + if self.scale == 3: + n_upscale = 1 + + fea_conv = B.conv_block( + self.in_nc, self.num_filters, kernel_size=3, norm_type=None, act_type=None + ) + rb_blocks = [ + B.RRDB( + self.num_filters, + kernel_size=3, + gc=32, 
+ stride=1, + bias=True, + pad_type="zero", + norm_type=norm, + act_type=act, + mode="CNA", + ) + for _ in range(self.num_blocks) + ] + LR_conv = B.conv_block( + self.num_filters, + self.num_filters, + kernel_size=3, + norm_type=norm, + act_type=None, + mode=mode, + ) + + if upsampler == "upconv": + upsample_block = B.upconv_block + elif upsampler == "pixelshuffle": + upsample_block = B.pixelshuffle_block + else: + raise NotImplementedError(f"upsample mode [{upsampler}] is not found") + if self.scale == 3: + a_upsampler = upsample_block( + self.num_filters, self.num_filters, 3, act_type=act + ) + else: + a_upsampler = [ + upsample_block(self.num_filters, self.num_filters, act_type=act) + for _ in range(n_upscale) + ] + self.HR_conv0_new = B.conv_block( + self.num_filters, + self.num_filters, + kernel_size=3, + norm_type=None, + act_type=act, + ) + self.HR_conv1_new = B.conv_block( + self.num_filters, + self.num_filters, + kernel_size=3, + norm_type=None, + act_type=None, + ) + + self.model = B.sequential( + fea_conv, + B.ShortcutBlockSPSR(B.sequential(*rb_blocks, LR_conv)), + *a_upsampler, + self.HR_conv0_new, + ) + + self.get_g_nopadding = Get_gradient_nopadding() + + self.b_fea_conv = B.conv_block( + self.in_nc, self.num_filters, kernel_size=3, norm_type=None, act_type=None + ) + + self.b_concat_1 = B.conv_block( + 2 * self.num_filters, + self.num_filters, + kernel_size=3, + norm_type=None, + act_type=None, + ) + self.b_block_1 = B.RRDB( + self.num_filters * 2, + kernel_size=3, + gc=32, + stride=1, + bias=True, + pad_type="zero", + norm_type=norm, + act_type=act, + mode="CNA", + ) + + self.b_concat_2 = B.conv_block( + 2 * self.num_filters, + self.num_filters, + kernel_size=3, + norm_type=None, + act_type=None, + ) + self.b_block_2 = B.RRDB( + self.num_filters * 2, + kernel_size=3, + gc=32, + stride=1, + bias=True, + pad_type="zero", + norm_type=norm, + act_type=act, + mode="CNA", + ) + + self.b_concat_3 = B.conv_block( + 2 * self.num_filters, + self.num_filters, + kernel_size=3, + norm_type=None, + act_type=None, + ) + self.b_block_3 = B.RRDB( + self.num_filters * 2, + kernel_size=3, + gc=32, + stride=1, + bias=True, + pad_type="zero", + norm_type=norm, + act_type=act, + mode="CNA", + ) + + self.b_concat_4 = B.conv_block( + 2 * self.num_filters, + self.num_filters, + kernel_size=3, + norm_type=None, + act_type=None, + ) + self.b_block_4 = B.RRDB( + self.num_filters * 2, + kernel_size=3, + gc=32, + stride=1, + bias=True, + pad_type="zero", + norm_type=norm, + act_type=act, + mode="CNA", + ) + + self.b_LR_conv = B.conv_block( + self.num_filters, + self.num_filters, + kernel_size=3, + norm_type=norm, + act_type=None, + mode=mode, + ) + + if upsampler == "upconv": + upsample_block = B.upconv_block + elif upsampler == "pixelshuffle": + upsample_block = B.pixelshuffle_block + else: + raise NotImplementedError(f"upsample mode [{upsampler}] is not found") + if self.scale == 3: + b_upsampler = upsample_block( + self.num_filters, self.num_filters, 3, act_type=act + ) + else: + b_upsampler = [ + upsample_block(self.num_filters, self.num_filters, act_type=act) + for _ in range(n_upscale) + ] + + b_HR_conv0 = B.conv_block( + self.num_filters, + self.num_filters, + kernel_size=3, + norm_type=None, + act_type=act, + ) + b_HR_conv1 = B.conv_block( + self.num_filters, + self.num_filters, + kernel_size=3, + norm_type=None, + act_type=None, + ) + + self.b_module = B.sequential(*b_upsampler, b_HR_conv0, b_HR_conv1) + + self.conv_w = B.conv_block( + self.num_filters, self.out_nc, kernel_size=1, 
norm_type=None, act_type=None + ) + + self.f_concat = B.conv_block( + self.num_filters * 2, + self.num_filters, + kernel_size=3, + norm_type=None, + act_type=None, + ) + + self.f_block = B.RRDB( + self.num_filters * 2, + kernel_size=3, + gc=32, + stride=1, + bias=True, + pad_type="zero", + norm_type=norm, + act_type=act, + mode="CNA", + ) + + self.f_HR_conv0 = B.conv_block( + self.num_filters, + self.num_filters, + kernel_size=3, + norm_type=None, + act_type=act, + ) + self.f_HR_conv1 = B.conv_block( + self.num_filters, self.out_nc, kernel_size=3, norm_type=None, act_type=None + ) + + self.load_state_dict(self.state, strict=False) + + def get_scale(self, min_part: int = 4) -> int: + n = 0 + for part in list(self.state): + parts = part.split(".") + if len(parts) == 3: + part_num = int(parts[1]) + if part_num > min_part and parts[0] == "model" and parts[2] == "weight": + n += 1 + return 2**n + + def get_num_blocks(self) -> int: + nb = 0 + for part in list(self.state): + parts = part.split(".") + n_parts = len(parts) + if n_parts == 5 and parts[2] == "sub": + nb = int(parts[3]) + return nb + + def forward(self, x): + x_grad = self.get_g_nopadding(x) + x = self.model[0](x) + + x, block_list = self.model[1](x) + + x_ori = x + for i in range(5): + x = block_list[i](x) + x_fea1 = x + + for i in range(5): + x = block_list[i + 5](x) + x_fea2 = x + + for i in range(5): + x = block_list[i + 10](x) + x_fea3 = x + + for i in range(5): + x = block_list[i + 15](x) + x_fea4 = x + + x = block_list[20:](x) + # short cut + x = x_ori + x + x = self.model[2:](x) + x = self.HR_conv1_new(x) + + x_b_fea = self.b_fea_conv(x_grad) + x_cat_1 = torch.cat([x_b_fea, x_fea1], dim=1) + + x_cat_1 = self.b_block_1(x_cat_1) + x_cat_1 = self.b_concat_1(x_cat_1) + + x_cat_2 = torch.cat([x_cat_1, x_fea2], dim=1) + + x_cat_2 = self.b_block_2(x_cat_2) + x_cat_2 = self.b_concat_2(x_cat_2) + + x_cat_3 = torch.cat([x_cat_2, x_fea3], dim=1) + + x_cat_3 = self.b_block_3(x_cat_3) + x_cat_3 = self.b_concat_3(x_cat_3) + + x_cat_4 = torch.cat([x_cat_3, x_fea4], dim=1) + + x_cat_4 = self.b_block_4(x_cat_4) + x_cat_4 = self.b_concat_4(x_cat_4) + + x_cat_4 = self.b_LR_conv(x_cat_4) + + # short cut + x_cat_4 = x_cat_4 + x_b_fea + x_branch = self.b_module(x_cat_4) + + # x_out_branch = self.conv_w(x_branch) + ######## + x_branch_d = x_branch + x_f_cat = torch.cat([x_branch_d, x], dim=1) + x_f_cat = self.f_block(x_f_cat) + x_out = self.f_concat(x_f_cat) + x_out = self.f_HR_conv0(x_out) + x_out = self.f_HR_conv1(x_out) + + ######### + # return x_out_branch, x_out, x_grad + return x_out diff --git a/ldm_patched/pfn/architecture/SRVGG.py b/ldm_patched/pfn/architecture/SRVGG.py new file mode 100644 index 0000000000000000000000000000000000000000..7a8ec37ae5dc4effd0ba688cf4c3a51801e1f2c9 --- /dev/null +++ b/ldm_patched/pfn/architecture/SRVGG.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import math + +import torch.nn as nn +import torch.nn.functional as F + + +class SRVGGNetCompact(nn.Module): + """A compact VGG-style network structure for super-resolution. + It is a compact network structure, which performs upsampling in the last layer and no convolution is + conducted on the HR feature space. + Args: + num_in_ch (int): Channel number of inputs. Default: 3. + num_out_ch (int): Channel number of outputs. Default: 3. + num_feat (int): Channel number of intermediate features. Default: 64. + num_conv (int): Number of convolution layers in the body network. Default: 16. + upscale (int): Upsampling factor. Default: 4. 
+ act_type (str): Activation type, options: 'relu', 'prelu', 'leakyrelu'. Default: prelu. + """ + + def __init__( + self, + state_dict, + act_type: str = "prelu", + ): + super(SRVGGNetCompact, self).__init__() + self.model_arch = "SRVGG (RealESRGAN)" + self.sub_type = "SR" + + self.act_type = act_type + + self.state = state_dict + + if "params" in self.state: + self.state = self.state["params"] + + self.key_arr = list(self.state.keys()) + + self.in_nc = self.get_in_nc() + self.num_feat = self.get_num_feats() + self.num_conv = self.get_num_conv() + self.out_nc = self.in_nc # :( + self.pixelshuffle_shape = None # Defined in get_scale() + self.scale = self.get_scale() + + self.supports_fp16 = True + self.supports_bfp16 = True + self.min_size_restriction = None + + self.body = nn.ModuleList() + # the first conv + self.body.append(nn.Conv2d(self.in_nc, self.num_feat, 3, 1, 1)) + # the first activation + if act_type == "relu": + activation = nn.ReLU(inplace=True) + elif act_type == "prelu": + activation = nn.PReLU(num_parameters=self.num_feat) + elif act_type == "leakyrelu": + activation = nn.LeakyReLU(negative_slope=0.1, inplace=True) + self.body.append(activation) # type: ignore + + # the body structure + for _ in range(self.num_conv): + self.body.append(nn.Conv2d(self.num_feat, self.num_feat, 3, 1, 1)) + # activation + if act_type == "relu": + activation = nn.ReLU(inplace=True) + elif act_type == "prelu": + activation = nn.PReLU(num_parameters=self.num_feat) + elif act_type == "leakyrelu": + activation = nn.LeakyReLU(negative_slope=0.1, inplace=True) + self.body.append(activation) # type: ignore + + # the last conv + self.body.append(nn.Conv2d(self.num_feat, self.pixelshuffle_shape, 3, 1, 1)) # type: ignore + # upsample + self.upsampler = nn.PixelShuffle(self.scale) + + self.load_state_dict(self.state, strict=False) + + def get_num_conv(self) -> int: + return (int(self.key_arr[-1].split(".")[1]) - 2) // 2 + + def get_num_feats(self) -> int: + return self.state[self.key_arr[0]].shape[0] + + def get_in_nc(self) -> int: + return self.state[self.key_arr[0]].shape[1] + + def get_scale(self) -> int: + self.pixelshuffle_shape = self.state[self.key_arr[-1]].shape[0] + # Assume out_nc is the same as in_nc + # I cant think of a better way to do that + self.out_nc = self.in_nc + scale = math.sqrt(self.pixelshuffle_shape / self.out_nc) + if scale - int(scale) > 0: + print( + "out_nc is probably different than in_nc, scale calculation might be wrong" + ) + scale = int(scale) + return scale + + def forward(self, x): + out = x + for i in range(0, len(self.body)): + out = self.body[i](out) + + out = self.upsampler(out) + # add the nearest upsampled image, so that the network learns the residual + base = F.interpolate(x, scale_factor=self.scale, mode="nearest") + out += base + return out diff --git a/ldm_patched/pfn/architecture/SwiftSRGAN.py b/ldm_patched/pfn/architecture/SwiftSRGAN.py new file mode 100644 index 0000000000000000000000000000000000000000..dbb7725b08dc2462661b7ba45db605a06fadacb9 --- /dev/null +++ b/ldm_patched/pfn/architecture/SwiftSRGAN.py @@ -0,0 +1,161 @@ +# From https://github.com/Koushik0901/Swift-SRGAN/blob/master/swift-srgan/models.py + +import torch +from torch import nn + + +class SeperableConv2d(nn.Module): + def __init__( + self, in_channels, out_channels, kernel_size, stride=1, padding=1, bias=True + ): + super(SeperableConv2d, self).__init__() + self.depthwise = nn.Conv2d( + in_channels, + in_channels, + kernel_size=kernel_size, + stride=stride, + groups=in_channels, + bias=bias, 
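+            # groups=in_channels makes this a depthwise convolution: each
+            # input channel is filtered independently, and the 1x1 pointwise
+            # conv below then mixes channels. For 64 -> 64 channels at k=3
+            # this needs 64*3*3 + 64*64 = 4,672 weights (ignoring biases)
+            # versus 64*64*3*3 = 36,864 for a standard convolution.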
+ padding=padding, + ) + self.pointwise = nn.Conv2d(in_channels, out_channels, kernel_size=1, bias=bias) + + def forward(self, x): + return self.pointwise(self.depthwise(x)) + + +class ConvBlock(nn.Module): + def __init__( + self, + in_channels, + out_channels, + use_act=True, + use_bn=True, + discriminator=False, + **kwargs, + ): + super(ConvBlock, self).__init__() + + self.use_act = use_act + self.cnn = SeperableConv2d(in_channels, out_channels, **kwargs, bias=not use_bn) + self.bn = nn.BatchNorm2d(out_channels) if use_bn else nn.Identity() + self.act = ( + nn.LeakyReLU(0.2, inplace=True) + if discriminator + else nn.PReLU(num_parameters=out_channels) + ) + + def forward(self, x): + return self.act(self.bn(self.cnn(x))) if self.use_act else self.bn(self.cnn(x)) + + +class UpsampleBlock(nn.Module): + def __init__(self, in_channels, scale_factor): + super(UpsampleBlock, self).__init__() + + self.conv = SeperableConv2d( + in_channels, + in_channels * scale_factor**2, + kernel_size=3, + stride=1, + padding=1, + ) + self.ps = nn.PixelShuffle( + scale_factor + ) # (in_channels * 4, H, W) -> (in_channels, H*2, W*2) + self.act = nn.PReLU(num_parameters=in_channels) + + def forward(self, x): + return self.act(self.ps(self.conv(x))) + + +class ResidualBlock(nn.Module): + def __init__(self, in_channels): + super(ResidualBlock, self).__init__() + + self.block1 = ConvBlock( + in_channels, in_channels, kernel_size=3, stride=1, padding=1 + ) + self.block2 = ConvBlock( + in_channels, in_channels, kernel_size=3, stride=1, padding=1, use_act=False + ) + + def forward(self, x): + out = self.block1(x) + out = self.block2(out) + return out + x + + +class Generator(nn.Module): + """Swift-SRGAN Generator + Args: + in_channels (int): number of input image channels. + num_channels (int): number of hidden channels. + num_blocks (int): number of residual blocks. + upscale_factor (int): factor to upscale the image [2x, 4x, 8x]. 
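+        Note: in this port the values above are not passed by the caller;
+        they are inferred from the checkpoint's state_dict in __init__.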
+ Returns: + torch.Tensor: super resolution image + """ + + def __init__( + self, + state_dict, + ): + super(Generator, self).__init__() + self.model_arch = "Swift-SRGAN" + self.sub_type = "SR" + self.state = state_dict + if "model" in self.state: + self.state = self.state["model"] + + self.in_nc: int = self.state["initial.cnn.depthwise.weight"].shape[0] + self.out_nc: int = self.state["final_conv.pointwise.weight"].shape[0] + self.num_filters: int = self.state["initial.cnn.pointwise.weight"].shape[0] + self.num_blocks = len( + set([x.split(".")[1] for x in self.state.keys() if "residual" in x]) + ) + self.scale: int = 2 ** len( + set([x.split(".")[1] for x in self.state.keys() if "upsampler" in x]) + ) + + in_channels = self.in_nc + num_channels = self.num_filters + num_blocks = self.num_blocks + upscale_factor = self.scale + + self.supports_fp16 = True + self.supports_bfp16 = True + self.min_size_restriction = None + + self.initial = ConvBlock( + in_channels, num_channels, kernel_size=9, stride=1, padding=4, use_bn=False + ) + self.residual = nn.Sequential( + *[ResidualBlock(num_channels) for _ in range(num_blocks)] + ) + self.convblock = ConvBlock( + num_channels, + num_channels, + kernel_size=3, + stride=1, + padding=1, + use_act=False, + ) + self.upsampler = nn.Sequential( + *[ + UpsampleBlock(num_channels, scale_factor=2) + for _ in range(upscale_factor // 2) + ] + ) + self.final_conv = SeperableConv2d( + num_channels, in_channels, kernel_size=9, stride=1, padding=4 + ) + + self.load_state_dict(self.state, strict=False) + + def forward(self, x): + initial = self.initial(x) + x = self.residual(initial) + x = self.convblock(x) + initial + x = self.upsampler(x) + return (torch.tanh(self.final_conv(x)) + 1) / 2 diff --git a/ldm_patched/pfn/architecture/Swin2SR.py b/ldm_patched/pfn/architecture/Swin2SR.py new file mode 100644 index 0000000000000000000000000000000000000000..cb57ecfc4ada45a6b087247017732437b1af0fcc --- /dev/null +++ b/ldm_patched/pfn/architecture/Swin2SR.py @@ -0,0 +1,1377 @@ +# pylint: skip-file +# ----------------------------------------------------------------------------------- +# Swin2SR: Swin2SR: SwinV2 Transformer for Compressed Image Super-Resolution and Restoration, https://arxiv.org/abs/2209.11345 +# Written by Conde and Choi et al. 
+# From: https://raw.githubusercontent.com/mv-lab/swin2sr/main/models/network_swin2sr.py +# ----------------------------------------------------------------------------------- + +import math +import re + +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.utils.checkpoint as checkpoint + +# Originally from the timm package +from .timm.drop import DropPath +from .timm.helpers import to_2tuple +from .timm.weight_init import trunc_normal_ + + +class Mlp(nn.Module): + def __init__( + self, + in_features, + hidden_features=None, + out_features=None, + act_layer=nn.GELU, + drop=0.0, + ): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +def window_partition(x, window_size): + """ + Args: + x: (B, H, W, C) + window_size (int): window size + Returns: + windows: (num_windows*B, window_size, window_size, C) + """ + B, H, W, C = x.shape + x = x.view(B, H // window_size, window_size, W // window_size, window_size, C) + windows = ( + x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) + ) + return windows + + +def window_reverse(windows, window_size, H, W): + """ + Args: + windows: (num_windows*B, window_size, window_size, C) + window_size (int): Window size + H (int): Height of image + W (int): Width of image + Returns: + x: (B, H, W, C) + """ + B = int(windows.shape[0] / (H * W / window_size / window_size)) + x = windows.view( + B, H // window_size, W // window_size, window_size, window_size, -1 + ) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1) + return x + + +class WindowAttention(nn.Module): + r"""Window based multi-head self attention (W-MSA) module with relative position bias. + It supports both of shifted and non-shifted window. + Args: + dim (int): Number of input channels. + window_size (tuple[int]): The height and width of the window. + num_heads (int): Number of attention heads. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0 + proj_drop (float, optional): Dropout ratio of output. Default: 0.0 + pretrained_window_size (tuple[int]): The height and width of the window in pre-training. 
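+        Note: this is the SwinV2 form of window attention. The relative
+        position bias comes from a small MLP (cpb_mlp) applied to a
+        continuous, log-spaced coordinate table, and attention scores are
+        cosine similarities scaled by a learned, clamped logit_scale rather
+        than the fixed head_dim ** -0.5 used in the SwinIR variant.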
+ """ + + def __init__( + self, + dim, + window_size, + num_heads, + qkv_bias=True, + attn_drop=0.0, + proj_drop=0.0, + pretrained_window_size=[0, 0], + ): + super().__init__() + self.dim = dim + self.window_size = window_size # Wh, Ww + self.pretrained_window_size = pretrained_window_size + self.num_heads = num_heads + + self.logit_scale = nn.Parameter(torch.log(10 * torch.ones((num_heads, 1, 1))), requires_grad=True) # type: ignore + + # mlp to generate continuous relative position bias + self.cpb_mlp = nn.Sequential( + nn.Linear(2, 512, bias=True), + nn.ReLU(inplace=True), + nn.Linear(512, num_heads, bias=False), + ) + + # get relative_coords_table + relative_coords_h = torch.arange( + -(self.window_size[0] - 1), self.window_size[0], dtype=torch.float32 + ) + relative_coords_w = torch.arange( + -(self.window_size[1] - 1), self.window_size[1], dtype=torch.float32 + ) + relative_coords_table = ( + torch.stack(torch.meshgrid([relative_coords_h, relative_coords_w])) + .permute(1, 2, 0) + .contiguous() + .unsqueeze(0) + ) # 1, 2*Wh-1, 2*Ww-1, 2 + if pretrained_window_size[0] > 0: + relative_coords_table[:, :, :, 0] /= pretrained_window_size[0] - 1 + relative_coords_table[:, :, :, 1] /= pretrained_window_size[1] - 1 + else: + relative_coords_table[:, :, :, 0] /= self.window_size[0] - 1 + relative_coords_table[:, :, :, 1] /= self.window_size[1] - 1 + relative_coords_table *= 8 # normalize to -8, 8 + relative_coords_table = ( + torch.sign(relative_coords_table) + * torch.log2(torch.abs(relative_coords_table) + 1.0) + / np.log2(8) + ) + + self.register_buffer("relative_coords_table", relative_coords_table) + + # get pair-wise relative position index for each token inside the window + coords_h = torch.arange(self.window_size[0]) + coords_w = torch.arange(self.window_size[1]) + coords = torch.stack(torch.meshgrid([coords_h, coords_w])) # 2, Wh, Ww + coords_flatten = torch.flatten(coords, 1) # 2, Wh*Ww + relative_coords = ( + coords_flatten[:, :, None] - coords_flatten[:, None, :] + ) # 2, Wh*Ww, Wh*Ww + relative_coords = relative_coords.permute( + 1, 2, 0 + ).contiguous() # Wh*Ww, Wh*Ww, 2 + relative_coords[:, :, 0] += self.window_size[0] - 1 # shift to start from 0 + relative_coords[:, :, 1] += self.window_size[1] - 1 + relative_coords[:, :, 0] *= 2 * self.window_size[1] - 1 + relative_position_index = relative_coords.sum(-1) # Wh*Ww, Wh*Ww + self.register_buffer("relative_position_index", relative_position_index) + + self.qkv = nn.Linear(dim, dim * 3, bias=False) + if qkv_bias: + self.q_bias = nn.Parameter(torch.zeros(dim)) # type: ignore + self.v_bias = nn.Parameter(torch.zeros(dim)) # type: ignore + else: + self.q_bias = None + self.v_bias = None + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + self.softmax = nn.Softmax(dim=-1) + + def forward(self, x, mask=None): + """ + Args: + x: input features with shape of (num_windows*B, N, C) + mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None + """ + B_, N, C = x.shape + qkv_bias = None + if self.q_bias is not None: + qkv_bias = torch.cat((self.q_bias, torch.zeros_like(self.v_bias, requires_grad=False), self.v_bias)) # type: ignore + qkv = F.linear(input=x, weight=self.qkv.weight, bias=qkv_bias) + qkv = qkv.reshape(B_, N, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4) + q, k, v = ( + qkv[0], + qkv[1], + qkv[2], + ) # make torchscript happy (cannot use tensor as tuple) + + # cosine attention + attn = F.normalize(q, dim=-1) @ F.normalize(k, dim=-1).transpose(-2, -1) + 
logit_scale = torch.clamp( + self.logit_scale, + max=torch.log(torch.tensor(1.0 / 0.01)).to(self.logit_scale.device), + ).exp() + attn = attn * logit_scale + + relative_position_bias_table = self.cpb_mlp(self.relative_coords_table).view( + -1, self.num_heads + ) + relative_position_bias = relative_position_bias_table[self.relative_position_index.view(-1)].view( # type: ignore + self.window_size[0] * self.window_size[1], + self.window_size[0] * self.window_size[1], + -1, + ) # Wh*Ww,Wh*Ww,nH + relative_position_bias = relative_position_bias.permute( + 2, 0, 1 + ).contiguous() # nH, Wh*Ww, Wh*Ww + relative_position_bias = 16 * torch.sigmoid(relative_position_bias) + attn = attn + relative_position_bias.unsqueeze(0) + + if mask is not None: + nW = mask.shape[0] + attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze( + 1 + ).unsqueeze(0) + attn = attn.view(-1, self.num_heads, N, N) + attn = self.softmax(attn) + else: + attn = self.softmax(attn) + + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B_, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + def extra_repr(self) -> str: + return ( + f"dim={self.dim}, window_size={self.window_size}, " + f"pretrained_window_size={self.pretrained_window_size}, num_heads={self.num_heads}" + ) + + def flops(self, N): + # calculate flops for 1 window with token length of N + flops = 0 + # qkv = self.qkv(x) + flops += N * self.dim * 3 * self.dim + # attn = (q @ k.transpose(-2, -1)) + flops += self.num_heads * N * (self.dim // self.num_heads) * N + # x = (attn @ v) + flops += self.num_heads * N * N * (self.dim // self.num_heads) + # x = self.proj(x) + flops += N * self.dim * self.dim + return flops + + +class SwinTransformerBlock(nn.Module): + r"""Swin Transformer Block. + Args: + dim (int): Number of input channels. + input_resolution (tuple[int]): Input resulotion. + num_heads (int): Number of attention heads. + window_size (int): Window size. + shift_size (int): Shift size for SW-MSA. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + drop (float, optional): Dropout rate. Default: 0.0 + attn_drop (float, optional): Attention dropout rate. Default: 0.0 + drop_path (float, optional): Stochastic depth rate. Default: 0.0 + act_layer (nn.Module, optional): Activation layer. Default: nn.GELU + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + pretrained_window_size (int): Window size in pre-training. 
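+        Note: following SwinV2, norm1 and norm2 are applied *after* the
+        attention and MLP branches (residual post-norm), not before them
+        as in the original SwinIR block.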
+ """ + + def __init__( + self, + dim, + input_resolution, + num_heads, + window_size=7, + shift_size=0, + mlp_ratio=4.0, + qkv_bias=True, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + act_layer=nn.GELU, + norm_layer=nn.LayerNorm, + pretrained_window_size=0, + ): + super().__init__() + self.dim = dim + self.input_resolution = input_resolution + self.num_heads = num_heads + self.window_size = window_size + self.shift_size = shift_size + self.mlp_ratio = mlp_ratio + if min(self.input_resolution) <= self.window_size: + # if window size is larger than input resolution, we don't partition windows + self.shift_size = 0 + self.window_size = min(self.input_resolution) + assert ( + 0 <= self.shift_size < self.window_size + ), "shift_size must in 0-window_size" + + self.norm1 = norm_layer(dim) + self.attn = WindowAttention( + dim, + window_size=to_2tuple(self.window_size), + num_heads=num_heads, + qkv_bias=qkv_bias, + attn_drop=attn_drop, + proj_drop=drop, + pretrained_window_size=to_2tuple(pretrained_window_size), + ) + + self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop, + ) + + if self.shift_size > 0: + attn_mask = self.calculate_mask(self.input_resolution) + else: + attn_mask = None + + self.register_buffer("attn_mask", attn_mask) + + def calculate_mask(self, x_size): + # calculate attention mask for SW-MSA + H, W = x_size + img_mask = torch.zeros((1, H, W, 1)) # 1 H W 1 + h_slices = ( + slice(0, -self.window_size), + slice(-self.window_size, -self.shift_size), + slice(-self.shift_size, None), + ) + w_slices = ( + slice(0, -self.window_size), + slice(-self.window_size, -self.shift_size), + slice(-self.shift_size, None), + ) + cnt = 0 + for h in h_slices: + for w in w_slices: + img_mask[:, h, w, :] = cnt + cnt += 1 + + mask_windows = window_partition( + img_mask, self.window_size + ) # nW, window_size, window_size, 1 + mask_windows = mask_windows.view(-1, self.window_size * self.window_size) + attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2) + attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill( + attn_mask == 0, float(0.0) + ) + + return attn_mask + + def forward(self, x, x_size): + H, W = x_size + B, L, C = x.shape + # assert L == H * W, "input feature has wrong size" + + shortcut = x + x = x.view(B, H, W, C) + + # cyclic shift + if self.shift_size > 0: + shifted_x = torch.roll( + x, shifts=(-self.shift_size, -self.shift_size), dims=(1, 2) + ) + else: + shifted_x = x + + # partition windows + x_windows = window_partition( + shifted_x, self.window_size + ) # nW*B, window_size, window_size, C + x_windows = x_windows.view( + -1, self.window_size * self.window_size, C + ) # nW*B, window_size*window_size, C + + # W-MSA/SW-MSA (to be compatible for testing on images whose shapes are the multiple of window size + if self.input_resolution == x_size: + attn_windows = self.attn( + x_windows, mask=self.attn_mask + ) # nW*B, window_size*window_size, C + else: + attn_windows = self.attn( + x_windows, mask=self.calculate_mask(x_size).to(x.device) + ) + + # merge windows + attn_windows = attn_windows.view(-1, self.window_size, self.window_size, C) + shifted_x = window_reverse(attn_windows, self.window_size, H, W) # B H' W' C + + # reverse cyclic shift + if self.shift_size > 0: + x = torch.roll( + shifted_x, shifts=(self.shift_size, self.shift_size), dims=(1, 2) + ) + 
else: + x = shifted_x + x = x.view(B, H * W, C) + x = shortcut + self.drop_path(self.norm1(x)) + + # FFN + x = x + self.drop_path(self.norm2(self.mlp(x))) + + return x + + def extra_repr(self) -> str: + return ( + f"dim={self.dim}, input_resolution={self.input_resolution}, num_heads={self.num_heads}, " + f"window_size={self.window_size}, shift_size={self.shift_size}, mlp_ratio={self.mlp_ratio}" + ) + + def flops(self): + flops = 0 + H, W = self.input_resolution + # norm1 + flops += self.dim * H * W + # W-MSA/SW-MSA + nW = H * W / self.window_size / self.window_size + flops += nW * self.attn.flops(self.window_size * self.window_size) + # mlp + flops += 2 * H * W * self.dim * self.dim * self.mlp_ratio + # norm2 + flops += self.dim * H * W + return flops + + +class PatchMerging(nn.Module): + r"""Patch Merging Layer. + Args: + input_resolution (tuple[int]): Resolution of input feature. + dim (int): Number of input channels. + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + """ + + def __init__(self, input_resolution, dim, norm_layer=nn.LayerNorm): + super().__init__() + self.input_resolution = input_resolution + self.dim = dim + self.reduction = nn.Linear(4 * dim, 2 * dim, bias=False) + self.norm = norm_layer(2 * dim) + + def forward(self, x): + """ + x: B, H*W, C + """ + H, W = self.input_resolution + B, L, C = x.shape + assert L == H * W, "input feature has wrong size" + assert H % 2 == 0 and W % 2 == 0, f"x size ({H}*{W}) are not even." + + x = x.view(B, H, W, C) + + x0 = x[:, 0::2, 0::2, :] # B H/2 W/2 C + x1 = x[:, 1::2, 0::2, :] # B H/2 W/2 C + x2 = x[:, 0::2, 1::2, :] # B H/2 W/2 C + x3 = x[:, 1::2, 1::2, :] # B H/2 W/2 C + x = torch.cat([x0, x1, x2, x3], -1) # B H/2 W/2 4*C + x = x.view(B, -1, 4 * C) # B H/2*W/2 4*C + + x = self.reduction(x) + x = self.norm(x) + + return x + + def extra_repr(self) -> str: + return f"input_resolution={self.input_resolution}, dim={self.dim}" + + def flops(self): + H, W = self.input_resolution + flops = (H // 2) * (W // 2) * 4 * self.dim * 2 * self.dim + flops += H * W * self.dim // 2 + return flops + + +class BasicLayer(nn.Module): + """A basic Swin Transformer layer for one stage. + Args: + dim (int): Number of input channels. + input_resolution (tuple[int]): Input resolution. + depth (int): Number of blocks. + num_heads (int): Number of attention heads. + window_size (int): Local window size. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + drop (float, optional): Dropout rate. Default: 0.0 + attn_drop (float, optional): Attention dropout rate. Default: 0.0 + drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0 + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None + use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False. + pretrained_window_size (int): Local window size in pre-training. 
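+        Note: blocks alternate between regular and shifted window attention
+        (even-indexed blocks use shift_size=0, odd-indexed blocks use
+        window_size // 2), which is what lets information cross window
+        boundaries.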
+ """ + + def __init__( + self, + dim, + input_resolution, + depth, + num_heads, + window_size, + mlp_ratio=4.0, + qkv_bias=True, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + norm_layer=nn.LayerNorm, + downsample=None, + use_checkpoint=False, + pretrained_window_size=0, + ): + super().__init__() + self.dim = dim + self.input_resolution = input_resolution + self.depth = depth + self.use_checkpoint = use_checkpoint + + # build blocks + self.blocks = nn.ModuleList( + [ + SwinTransformerBlock( + dim=dim, + input_resolution=input_resolution, + num_heads=num_heads, + window_size=window_size, + shift_size=0 if (i % 2 == 0) else window_size // 2, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + drop=drop, + attn_drop=attn_drop, + drop_path=drop_path[i] + if isinstance(drop_path, list) + else drop_path, + norm_layer=norm_layer, + pretrained_window_size=pretrained_window_size, + ) + for i in range(depth) + ] + ) + + # patch merging layer + if downsample is not None: + self.downsample = downsample( + input_resolution, dim=dim, norm_layer=norm_layer + ) + else: + self.downsample = None + + def forward(self, x, x_size): + for blk in self.blocks: + if self.use_checkpoint: + x = checkpoint.checkpoint(blk, x, x_size) + else: + x = blk(x, x_size) + if self.downsample is not None: + x = self.downsample(x) + return x + + def extra_repr(self) -> str: + return f"dim={self.dim}, input_resolution={self.input_resolution}, depth={self.depth}" + + def flops(self): + flops = 0 + for blk in self.blocks: + flops += blk.flops() # type: ignore + if self.downsample is not None: + flops += self.downsample.flops() + return flops + + def _init_respostnorm(self): + for blk in self.blocks: + nn.init.constant_(blk.norm1.bias, 0) # type: ignore + nn.init.constant_(blk.norm1.weight, 0) # type: ignore + nn.init.constant_(blk.norm2.bias, 0) # type: ignore + nn.init.constant_(blk.norm2.weight, 0) # type: ignore + + +class PatchEmbed(nn.Module): + r"""Image to Patch Embedding + Args: + img_size (int): Image size. Default: 224. + patch_size (int): Patch token size. Default: 4. + in_chans (int): Number of input image channels. Default: 3. + embed_dim (int): Number of linear projection output channels. Default: 96. + norm_layer (nn.Module, optional): Normalization layer. Default: None + """ + + def __init__( + self, img_size=224, patch_size=4, in_chans=3, embed_dim=96, norm_layer=None + ): + super().__init__() + img_size = to_2tuple(img_size) + patch_size = to_2tuple(patch_size) + patches_resolution = [img_size[0] // patch_size[0], img_size[1] // patch_size[1]] # type: ignore + self.img_size = img_size + self.patch_size = patch_size + self.patches_resolution = patches_resolution + self.num_patches = patches_resolution[0] * patches_resolution[1] + + self.in_chans = in_chans + self.embed_dim = embed_dim + + self.proj = nn.Conv2d( + in_chans, embed_dim, kernel_size=patch_size, stride=patch_size # type: ignore + ) + if norm_layer is not None: + self.norm = norm_layer(embed_dim) + else: + self.norm = None + + def forward(self, x): + B, C, H, W = x.shape + # FIXME look at relaxing size constraints + # assert H == self.img_size[0] and W == self.img_size[1], + # f"Input image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]})." 
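+        # The size check above is deliberately relaxed: check_image_size()
+        # pads inputs to a multiple of the window size, so H/W at run time
+        # may differ from the img_size the model was built with.
+        # Shape flow: (B, C, H, W) -> proj -> (B, embed_dim, H/p, W/p)
+        # -> flatten(2).transpose(1, 2) -> (B, Ph*Pw, embed_dim).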
+ x = self.proj(x).flatten(2).transpose(1, 2) # B Ph*Pw C + if self.norm is not None: + x = self.norm(x) + return x + + def flops(self): + Ho, Wo = self.patches_resolution + flops = Ho * Wo * self.embed_dim * self.in_chans * (self.patch_size[0] * self.patch_size[1]) # type: ignore + if self.norm is not None: + flops += Ho * Wo * self.embed_dim + return flops + + +class RSTB(nn.Module): + """Residual Swin Transformer Block (RSTB). + + Args: + dim (int): Number of input channels. + input_resolution (tuple[int]): Input resolution. + depth (int): Number of blocks. + num_heads (int): Number of attention heads. + window_size (int): Local window size. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + drop (float, optional): Dropout rate. Default: 0.0 + attn_drop (float, optional): Attention dropout rate. Default: 0.0 + drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0 + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None + use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False. + img_size: Input image size. + patch_size: Patch size. + resi_connection: The convolutional block before residual connection. + """ + + def __init__( + self, + dim, + input_resolution, + depth, + num_heads, + window_size, + mlp_ratio=4.0, + qkv_bias=True, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + norm_layer=nn.LayerNorm, + downsample=None, + use_checkpoint=False, + img_size=224, + patch_size=4, + resi_connection="1conv", + ): + super(RSTB, self).__init__() + + self.dim = dim + self.input_resolution = input_resolution + + self.residual_group = BasicLayer( + dim=dim, + input_resolution=input_resolution, + depth=depth, + num_heads=num_heads, + window_size=window_size, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + drop=drop, + attn_drop=attn_drop, + drop_path=drop_path, + norm_layer=norm_layer, + downsample=downsample, + use_checkpoint=use_checkpoint, + ) + + if resi_connection == "1conv": + self.conv = nn.Conv2d(dim, dim, 3, 1, 1) + elif resi_connection == "3conv": + # to save parameters and memory + self.conv = nn.Sequential( + nn.Conv2d(dim, dim // 4, 3, 1, 1), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv2d(dim // 4, dim // 4, 1, 1, 0), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv2d(dim // 4, dim, 3, 1, 1), + ) + + self.patch_embed = PatchEmbed( + img_size=img_size, + patch_size=patch_size, + in_chans=dim, + embed_dim=dim, + norm_layer=None, + ) + + self.patch_unembed = PatchUnEmbed( + img_size=img_size, + patch_size=patch_size, + in_chans=dim, + embed_dim=dim, + norm_layer=None, + ) + + def forward(self, x, x_size): + return ( + self.patch_embed( + self.conv(self.patch_unembed(self.residual_group(x, x_size), x_size)) + ) + + x + ) + + def flops(self): + flops = 0 + flops += self.residual_group.flops() + H, W = self.input_resolution + flops += H * W * self.dim * self.dim * 9 + flops += self.patch_embed.flops() + flops += self.patch_unembed.flops() + + return flops + + +class PatchUnEmbed(nn.Module): + r"""Image to Patch Unembedding + + Args: + img_size (int): Image size. Default: 224. + patch_size (int): Patch token size. Default: 4. + in_chans (int): Number of input image channels. Default: 3. + embed_dim (int): Number of linear projection output channels. Default: 96. 
+ norm_layer (nn.Module, optional): Normalization layer. Default: None + """ + + def __init__( + self, img_size=224, patch_size=4, in_chans=3, embed_dim=96, norm_layer=None + ): + super().__init__() + img_size = to_2tuple(img_size) + patch_size = to_2tuple(patch_size) + patches_resolution = [img_size[0] // patch_size[0], img_size[1] // patch_size[1]] # type: ignore + self.img_size = img_size + self.patch_size = patch_size + self.patches_resolution = patches_resolution + self.num_patches = patches_resolution[0] * patches_resolution[1] + + self.in_chans = in_chans + self.embed_dim = embed_dim + + def forward(self, x, x_size): + B, HW, C = x.shape + x = x.transpose(1, 2).view(B, self.embed_dim, x_size[0], x_size[1]) # B Ph*Pw C + return x + + def flops(self): + flops = 0 + return flops + + +class Upsample(nn.Sequential): + """Upsample module. + + Args: + scale (int): Scale factor. Supported scales: 2^n and 3. + num_feat (int): Channel number of intermediate features. + """ + + def __init__(self, scale, num_feat): + m = [] + if (scale & (scale - 1)) == 0: # scale = 2^n + for _ in range(int(math.log(scale, 2))): + m.append(nn.Conv2d(num_feat, 4 * num_feat, 3, 1, 1)) + m.append(nn.PixelShuffle(2)) + elif scale == 3: + m.append(nn.Conv2d(num_feat, 9 * num_feat, 3, 1, 1)) + m.append(nn.PixelShuffle(3)) + else: + raise ValueError( + f"scale {scale} is not supported. " "Supported scales: 2^n and 3." + ) + super(Upsample, self).__init__(*m) + + +class Upsample_hf(nn.Sequential): + """Upsample module. + + Args: + scale (int): Scale factor. Supported scales: 2^n and 3. + num_feat (int): Channel number of intermediate features. + """ + + def __init__(self, scale, num_feat): + m = [] + if (scale & (scale - 1)) == 0: # scale = 2^n + for _ in range(int(math.log(scale, 2))): + m.append(nn.Conv2d(num_feat, 4 * num_feat, 3, 1, 1)) + m.append(nn.PixelShuffle(2)) + elif scale == 3: + m.append(nn.Conv2d(num_feat, 9 * num_feat, 3, 1, 1)) + m.append(nn.PixelShuffle(3)) + else: + raise ValueError( + f"scale {scale} is not supported. " "Supported scales: 2^n and 3." + ) + super(Upsample_hf, self).__init__(*m) + + +class UpsampleOneStep(nn.Sequential): + """UpsampleOneStep module (the difference with Upsample is that it always only has 1conv + 1pixelshuffle) + Used in lightweight SR to save parameters. + + Args: + scale (int): Scale factor. Supported scales: 2^n and 3. + num_feat (int): Channel number of intermediate features. + + """ + + def __init__(self, scale, num_feat, num_out_ch, input_resolution=None): + self.num_feat = num_feat + self.input_resolution = input_resolution + m = [] + m.append(nn.Conv2d(num_feat, (scale**2) * num_out_ch, 3, 1, 1)) + m.append(nn.PixelShuffle(scale)) + super(UpsampleOneStep, self).__init__(*m) + + def flops(self): + H, W = self.input_resolution # type: ignore + flops = H * W * self.num_feat * 3 * 9 + return flops + + +class Swin2SR(nn.Module): + r"""Swin2SR + A PyTorch impl of : `Swin2SR: SwinV2 Transformer for Compressed Image Super-Resolution and Restoration`. + + Args: + img_size (int | tuple(int)): Input image size. Default 64 + patch_size (int | tuple(int)): Patch size. Default: 1 + in_chans (int): Number of input image channels. Default: 3 + embed_dim (int): Patch embedding dimension. Default: 96 + depths (tuple(int)): Depth of each Swin Transformer layer. + num_heads (tuple(int)): Number of attention heads in different layers. + window_size (int): Window size. Default: 7 + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. 
Default: 4
+        qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True
+        drop_rate (float): Dropout rate. Default: 0
+        attn_drop_rate (float): Attention dropout rate. Default: 0
+        drop_path_rate (float): Stochastic depth rate. Default: 0.1
+        norm_layer (nn.Module): Normalization layer. Default: nn.LayerNorm.
+        ape (bool): If True, add absolute position embedding to the patch embedding. Default: False
+        patch_norm (bool): If True, add normalization after patch embedding. Default: True
+        use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False
+        upscale: Upscale factor. 2/3/4/8 for image SR, 1 for denoising and compression artifact reduction
+        img_range: Image range. 1. or 255.
+        upsampler: The reconstruction module. 'pixelshuffle'/'pixelshuffledirect'/'nearest+conv'/None
+        resi_connection: The convolutional block before residual connection. '1conv'/'3conv'
+    """
+
+    def __init__(
+        self,
+        state_dict,
+        **kwargs,
+    ):
+        super(Swin2SR, self).__init__()
+
+        # Defaults
+        img_size = 128
+        patch_size = 1
+        in_chans = 3
+        embed_dim = 96
+        depths = [6, 6, 6, 6]
+        num_heads = [6, 6, 6, 6]
+        window_size = 7
+        mlp_ratio = 4.0
+        qkv_bias = True
+        drop_rate = 0.0
+        attn_drop_rate = 0.0
+        drop_path_rate = 0.1
+        norm_layer = nn.LayerNorm
+        ape = False
+        patch_norm = True
+        use_checkpoint = False
+        upscale = 2
+        img_range = 1.0
+        upsampler = ""
+        resi_connection = "1conv"
+        num_in_ch = in_chans
+        num_out_ch = in_chans
+        num_feat = 64
+
+        self.model_arch = "Swin2SR"
+        self.sub_type = "SR"
+        self.state = state_dict
+        if "params_ema" in self.state:
+            self.state = self.state["params_ema"]
+        elif "params" in self.state:
+            self.state = self.state["params"]
+
+        state_keys = self.state.keys()
+
+        if "conv_before_upsample.0.weight" in state_keys:
+            if "conv_aux.weight" in state_keys:
+                upsampler = "pixelshuffle_aux"
+            elif "conv_up1.weight" in state_keys:
+                upsampler = "nearest+conv"
+            else:
+                upsampler = "pixelshuffle"
+                supports_fp16 = False
+        elif "upsample.0.weight" in state_keys:
+            upsampler = "pixelshuffledirect"
+        else:
+            upsampler = ""
+
+        # conv_before_upsample.0 is Conv2d(embed_dim, num_feat, 3, 1, 1), so
+        # num_feat is the out-channel count (shape[0]) of its weight.
+        num_feat = (
+            self.state["conv_before_upsample.0.weight"].shape[0]
+            if "conv_before_upsample.0.weight" in state_keys
+            else 64
+        )
+
+        num_in_ch = self.state["conv_first.weight"].shape[1]
+        in_chans = num_in_ch
+        if "conv_last.weight" in state_keys:
+            num_out_ch = self.state["conv_last.weight"].shape[0]
+        else:
+            num_out_ch = num_in_ch
+
+        upscale = 1
+        if upsampler == "nearest+conv":
+            upsample_keys = [
+                x for x in state_keys if "conv_up" in x and "bias" not in x
+            ]
+
+            for upsample_key in upsample_keys:
+                upscale *= 2
+        elif upsampler == "pixelshuffle" or upsampler == "pixelshuffle_aux":
+            upsample_keys = [
+                x
+                for x in state_keys
+                if "upsample" in x and "conv" not in x and "bias" not in x
+            ]
+            for upsample_key in upsample_keys:
+                shape = self.state[upsample_key].shape[0]
+                upscale *= math.sqrt(shape // num_feat)
+            upscale = int(upscale)
+        elif upsampler == "pixelshuffledirect":
+            upscale = int(
+                math.sqrt(self.state["upsample.0.bias"].shape[0] // num_out_ch)
+            )
+
+        max_layer_num = 0
+        max_block_num = 0
+        for key in state_keys:
+            result = re.match(
+                r"layers.(\d*).residual_group.blocks.(\d*).norm1.weight", key
+            )
+            if result:
+                layer_num, block_num = result.groups()
+                max_layer_num = max(max_layer_num, int(layer_num))
+                max_block_num = max(max_block_num, int(block_num))
+
+        depths = [max_block_num + 1 for _ in range(max_layer_num + 1)]
+
+        if (
+
"layers.0.residual_group.blocks.0.attn.relative_position_bias_table" + in state_keys + ): + num_heads_num = self.state[ + "layers.0.residual_group.blocks.0.attn.relative_position_bias_table" + ].shape[-1] + num_heads = [num_heads_num for _ in range(max_layer_num + 1)] + else: + num_heads = depths + + embed_dim = self.state["conv_first.weight"].shape[0] + + mlp_ratio = float( + self.state["layers.0.residual_group.blocks.0.mlp.fc1.bias"].shape[0] + / embed_dim + ) + + # TODO: could actually count the layers, but this should do + if "layers.0.conv.4.weight" in state_keys: + resi_connection = "3conv" + else: + resi_connection = "1conv" + + window_size = int( + math.sqrt( + self.state[ + "layers.0.residual_group.blocks.0.attn.relative_position_index" + ].shape[0] + ) + ) + + if "layers.0.residual_group.blocks.1.attn_mask" in state_keys: + img_size = int( + math.sqrt( + self.state["layers.0.residual_group.blocks.1.attn_mask"].shape[0] + ) + * window_size + ) + + # The JPEG models are the only ones with window-size 7, and they also use this range + img_range = 255.0 if window_size == 7 else 1.0 + + self.in_nc = num_in_ch + self.out_nc = num_out_ch + self.num_feat = num_feat + self.embed_dim = embed_dim + self.num_heads = num_heads + self.depths = depths + self.window_size = window_size + self.mlp_ratio = mlp_ratio + self.scale = upscale + self.upsampler = upsampler + self.img_size = img_size + self.img_range = img_range + self.resi_connection = resi_connection + + self.supports_fp16 = False # Too much weirdness to support this at the moment + self.supports_bfp16 = True + self.min_size_restriction = 16 + + ## END AUTO DETECTION + + if in_chans == 3: + rgb_mean = (0.4488, 0.4371, 0.4040) + self.mean = torch.Tensor(rgb_mean).view(1, 3, 1, 1) + else: + self.mean = torch.zeros(1, 1, 1, 1) + self.upscale = upscale + self.upsampler = upsampler + self.window_size = window_size + + ##################################################################################################### + ################################### 1, shallow feature extraction ################################### + self.conv_first = nn.Conv2d(num_in_ch, embed_dim, 3, 1, 1) + + ##################################################################################################### + ################################### 2, deep feature extraction ###################################### + self.num_layers = len(depths) + self.embed_dim = embed_dim + self.ape = ape + self.patch_norm = patch_norm + self.num_features = embed_dim + self.mlp_ratio = mlp_ratio + + # split image into non-overlapping patches + self.patch_embed = PatchEmbed( + img_size=img_size, + patch_size=patch_size, + in_chans=embed_dim, + embed_dim=embed_dim, + norm_layer=norm_layer if self.patch_norm else None, + ) + num_patches = self.patch_embed.num_patches + patches_resolution = self.patch_embed.patches_resolution + self.patches_resolution = patches_resolution + + # merge non-overlapping patches into image + self.patch_unembed = PatchUnEmbed( + img_size=img_size, + patch_size=patch_size, + in_chans=embed_dim, + embed_dim=embed_dim, + norm_layer=norm_layer if self.patch_norm else None, + ) + + # absolute position embedding + if self.ape: + self.absolute_pos_embed = nn.Parameter(torch.zeros(1, num_patches, embed_dim)) # type: ignore + trunc_normal_(self.absolute_pos_embed, std=0.02) + + self.pos_drop = nn.Dropout(p=drop_rate) + + # stochastic depth + dpr = [ + x.item() for x in torch.linspace(0, drop_path_rate, sum(depths)) + ] # stochastic depth decay rule + + # build Residual 
Swin Transformer blocks (RSTB) + self.layers = nn.ModuleList() + for i_layer in range(self.num_layers): + layer = RSTB( + dim=embed_dim, + input_resolution=(patches_resolution[0], patches_resolution[1]), + depth=depths[i_layer], + num_heads=num_heads[i_layer], + window_size=window_size, + mlp_ratio=self.mlp_ratio, + qkv_bias=qkv_bias, + drop=drop_rate, + attn_drop=attn_drop_rate, + drop_path=dpr[sum(depths[:i_layer]) : sum(depths[: i_layer + 1])], # type: ignore # no impact on SR results + norm_layer=norm_layer, + downsample=None, + use_checkpoint=use_checkpoint, + img_size=img_size, + patch_size=patch_size, + resi_connection=resi_connection, + ) + self.layers.append(layer) + + if self.upsampler == "pixelshuffle_hf": + self.layers_hf = nn.ModuleList() + for i_layer in range(self.num_layers): + layer = RSTB( + dim=embed_dim, + input_resolution=(patches_resolution[0], patches_resolution[1]), + depth=depths[i_layer], + num_heads=num_heads[i_layer], + window_size=window_size, + mlp_ratio=self.mlp_ratio, + qkv_bias=qkv_bias, + drop=drop_rate, + attn_drop=attn_drop_rate, + drop_path=dpr[sum(depths[:i_layer]) : sum(depths[: i_layer + 1])], # type: ignore # no impact on SR results # type: ignore + norm_layer=norm_layer, + downsample=None, + use_checkpoint=use_checkpoint, + img_size=img_size, + patch_size=patch_size, + resi_connection=resi_connection, + ) + self.layers_hf.append(layer) + + self.norm = norm_layer(self.num_features) + + # build the last conv layer in deep feature extraction + if resi_connection == "1conv": + self.conv_after_body = nn.Conv2d(embed_dim, embed_dim, 3, 1, 1) + elif resi_connection == "3conv": + # to save parameters and memory + self.conv_after_body = nn.Sequential( + nn.Conv2d(embed_dim, embed_dim // 4, 3, 1, 1), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv2d(embed_dim // 4, embed_dim // 4, 1, 1, 0), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv2d(embed_dim // 4, embed_dim, 3, 1, 1), + ) + + ##################################################################################################### + ################################ 3, high quality image reconstruction ################################ + if self.upsampler == "pixelshuffle": + # for classical SR + self.conv_before_upsample = nn.Sequential( + nn.Conv2d(embed_dim, num_feat, 3, 1, 1), nn.LeakyReLU(inplace=True) + ) + self.upsample = Upsample(upscale, num_feat) + self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1) + elif self.upsampler == "pixelshuffle_aux": + self.conv_bicubic = nn.Conv2d(num_in_ch, num_feat, 3, 1, 1) + self.conv_before_upsample = nn.Sequential( + nn.Conv2d(embed_dim, num_feat, 3, 1, 1), nn.LeakyReLU(inplace=True) + ) + self.conv_aux = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1) + self.conv_after_aux = nn.Sequential( + nn.Conv2d(3, num_feat, 3, 1, 1), nn.LeakyReLU(inplace=True) + ) + self.upsample = Upsample(upscale, num_feat) + self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1) + + elif self.upsampler == "pixelshuffle_hf": + self.conv_before_upsample = nn.Sequential( + nn.Conv2d(embed_dim, num_feat, 3, 1, 1), nn.LeakyReLU(inplace=True) + ) + self.upsample = Upsample(upscale, num_feat) + self.upsample_hf = Upsample_hf(upscale, num_feat) + self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1) + self.conv_first_hf = nn.Sequential( + nn.Conv2d(num_feat, embed_dim, 3, 1, 1), nn.LeakyReLU(inplace=True) + ) + self.conv_after_body_hf = nn.Conv2d(embed_dim, embed_dim, 3, 1, 1) + self.conv_before_upsample_hf = nn.Sequential( + nn.Conv2d(embed_dim, num_feat, 3, 1, 
1), nn.LeakyReLU(inplace=True) + ) + self.conv_last_hf = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1) + + elif self.upsampler == "pixelshuffledirect": + # for lightweight SR (to save parameters) + self.upsample = UpsampleOneStep( + upscale, + embed_dim, + num_out_ch, + (patches_resolution[0], patches_resolution[1]), + ) + elif self.upsampler == "nearest+conv": + # for real-world SR (less artifacts) + assert self.upscale == 4, "only support x4 now." + self.conv_before_upsample = nn.Sequential( + nn.Conv2d(embed_dim, num_feat, 3, 1, 1), nn.LeakyReLU(inplace=True) + ) + self.conv_up1 = nn.Conv2d(num_feat, num_feat, 3, 1, 1) + self.conv_up2 = nn.Conv2d(num_feat, num_feat, 3, 1, 1) + self.conv_hr = nn.Conv2d(num_feat, num_feat, 3, 1, 1) + self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1) + self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True) + else: + # for image denoising and JPEG compression artifact reduction + self.conv_last = nn.Conv2d(embed_dim, num_out_ch, 3, 1, 1) + + self.apply(self._init_weights) + + self.load_state_dict(state_dict) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore # type: ignore + def no_weight_decay(self): + return {"absolute_pos_embed"} + + @torch.jit.ignore # type: ignore + def no_weight_decay_keywords(self): + return {"relative_position_bias_table"} + + def check_image_size(self, x): + _, _, h, w = x.size() + mod_pad_h = (self.window_size - h % self.window_size) % self.window_size + mod_pad_w = (self.window_size - w % self.window_size) % self.window_size + x = F.pad(x, (0, mod_pad_w, 0, mod_pad_h), "reflect") + return x + + def forward_features(self, x): + x_size = (x.shape[2], x.shape[3]) + x = self.patch_embed(x) + if self.ape: + x = x + self.absolute_pos_embed + x = self.pos_drop(x) + + for layer in self.layers: + x = layer(x, x_size) + + x = self.norm(x) # B L C + x = self.patch_unembed(x, x_size) + + return x + + def forward_features_hf(self, x): + x_size = (x.shape[2], x.shape[3]) + x = self.patch_embed(x) + if self.ape: + x = x + self.absolute_pos_embed + x = self.pos_drop(x) + + for layer in self.layers_hf: + x = layer(x, x_size) + + x = self.norm(x) # B L C + x = self.patch_unembed(x, x_size) + + return x + + def forward(self, x): + H, W = x.shape[2:] + x = self.check_image_size(x) + + self.mean = self.mean.type_as(x) + x = (x - self.mean) * self.img_range + + if self.upsampler == "pixelshuffle": + # for classical SR + x = self.conv_first(x) + x = self.conv_after_body(self.forward_features(x)) + x + x = self.conv_before_upsample(x) + x = self.conv_last(self.upsample(x)) + elif self.upsampler == "pixelshuffle_aux": + bicubic = F.interpolate( + x, + size=(H * self.upscale, W * self.upscale), + mode="bicubic", + align_corners=False, + ) + bicubic = self.conv_bicubic(bicubic) + x = self.conv_first(x) + x = self.conv_after_body(self.forward_features(x)) + x + x = self.conv_before_upsample(x) + aux = self.conv_aux(x) # b, 3, LR_H, LR_W + x = self.conv_after_aux(aux) + x = ( + self.upsample(x)[:, :, : H * self.upscale, : W * self.upscale] + + bicubic[:, :, : H * self.upscale, : W * self.upscale] + ) + x = self.conv_last(x) + aux = aux / self.img_range + self.mean + elif self.upsampler == "pixelshuffle_hf": + # for classical SR with HF + x = self.conv_first(x) + x = 
self.conv_after_body(self.forward_features(x)) + x + x_before = self.conv_before_upsample(x) + x_out = self.conv_last(self.upsample(x_before)) + + x_hf = self.conv_first_hf(x_before) + x_hf = self.conv_after_body_hf(self.forward_features_hf(x_hf)) + x_hf + x_hf = self.conv_before_upsample_hf(x_hf) + x_hf = self.conv_last_hf(self.upsample_hf(x_hf)) + x = x_out + x_hf + x_hf = x_hf / self.img_range + self.mean + + elif self.upsampler == "pixelshuffledirect": + # for lightweight SR + x = self.conv_first(x) + x = self.conv_after_body(self.forward_features(x)) + x + x = self.upsample(x) + elif self.upsampler == "nearest+conv": + # for real-world SR + x = self.conv_first(x) + x = self.conv_after_body(self.forward_features(x)) + x + x = self.conv_before_upsample(x) + x = self.lrelu( + self.conv_up1( + torch.nn.functional.interpolate(x, scale_factor=2, mode="nearest") + ) + ) + x = self.lrelu( + self.conv_up2( + torch.nn.functional.interpolate(x, scale_factor=2, mode="nearest") + ) + ) + x = self.conv_last(self.lrelu(self.conv_hr(x))) + else: + # for image denoising and JPEG compression artifact reduction + x_first = self.conv_first(x) + res = self.conv_after_body(self.forward_features(x_first)) + x_first + x = x + self.conv_last(res) + + x = x / self.img_range + self.mean + if self.upsampler == "pixelshuffle_aux": + # NOTE: I removed an "aux" output here. not sure what that was for + return x[:, :, : H * self.upscale, : W * self.upscale] # type: ignore + + elif self.upsampler == "pixelshuffle_hf": + x_out = x_out / self.img_range + self.mean # type: ignore + return x_out[:, :, : H * self.upscale, : W * self.upscale], x[:, :, : H * self.upscale, : W * self.upscale], x_hf[:, :, : H * self.upscale, : W * self.upscale] # type: ignore + + else: + return x[:, :, : H * self.upscale, : W * self.upscale] + + def flops(self): + flops = 0 + H, W = self.patches_resolution + flops += H * W * 3 * self.embed_dim * 9 + flops += self.patch_embed.flops() + for i, layer in enumerate(self.layers): + flops += layer.flops() # type: ignore + flops += H * W * 3 * self.embed_dim * self.embed_dim + flops += self.upsample.flops() # type: ignore + return flops diff --git a/ldm_patched/pfn/architecture/SwinIR.py b/ldm_patched/pfn/architecture/SwinIR.py new file mode 100644 index 0000000000000000000000000000000000000000..439dcbcb2b12f7ff27a01490f4c2ae7b6e4eab9e --- /dev/null +++ b/ldm_patched/pfn/architecture/SwinIR.py @@ -0,0 +1,1224 @@ +# pylint: skip-file +# ----------------------------------------------------------------------------------- +# SwinIR: Image Restoration Using Swin Transformer, https://arxiv.org/abs/2108.10257 +# Originally Written by Ze Liu, Modified by Jingyun Liang. 
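+# From: https://github.com/JingyunLiang/SwinIR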
+# ----------------------------------------------------------------------------------- + +import math +import re + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.utils.checkpoint as checkpoint + +# Originally from the timm package +from .timm.drop import DropPath +from .timm.helpers import to_2tuple +from .timm.weight_init import trunc_normal_ + + +class Mlp(nn.Module): + def __init__( + self, + in_features, + hidden_features=None, + out_features=None, + act_layer=nn.GELU, + drop=0.0, + ): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +def window_partition(x, window_size): + """ + Args: + x: (B, H, W, C) + window_size (int): window size + + Returns: + windows: (num_windows*B, window_size, window_size, C) + """ + B, H, W, C = x.shape + x = x.view(B, H // window_size, window_size, W // window_size, window_size, C) + windows = ( + x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) + ) + return windows + + +def window_reverse(windows, window_size, H, W): + """ + Args: + windows: (num_windows*B, window_size, window_size, C) + window_size (int): Window size + H (int): Height of image + W (int): Width of image + + Returns: + x: (B, H, W, C) + """ + B = int(windows.shape[0] / (H * W / window_size / window_size)) + x = windows.view( + B, H // window_size, W // window_size, window_size, window_size, -1 + ) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1) + return x + + +class WindowAttention(nn.Module): + r"""Window based multi-head self attention (W-MSA) module with relative position bias. + It supports both of shifted and non-shifted window. + + Args: + dim (int): Number of input channels. + window_size (tuple[int]): The height and width of the window. + num_heads (int): Number of attention heads. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set + attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0 + proj_drop (float, optional): Dropout ratio of output. 
Default: 0.0 + """ + + def __init__( + self, + dim, + window_size, + num_heads, + qkv_bias=True, + qk_scale=None, + attn_drop=0.0, + proj_drop=0.0, + ): + super().__init__() + self.dim = dim + self.window_size = window_size # Wh, Ww + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = qk_scale or head_dim**-0.5 + + # define a parameter table of relative position bias + self.relative_position_bias_table = nn.Parameter( # type: ignore + torch.zeros((2 * window_size[0] - 1) * (2 * window_size[1] - 1), num_heads) + ) # 2*Wh-1 * 2*Ww-1, nH + + # get pair-wise relative position index for each token inside the window + coords_h = torch.arange(self.window_size[0]) + coords_w = torch.arange(self.window_size[1]) + coords = torch.stack(torch.meshgrid([coords_h, coords_w])) # 2, Wh, Ww + coords_flatten = torch.flatten(coords, 1) # 2, Wh*Ww + relative_coords = ( + coords_flatten[:, :, None] - coords_flatten[:, None, :] + ) # 2, Wh*Ww, Wh*Ww + relative_coords = relative_coords.permute( + 1, 2, 0 + ).contiguous() # Wh*Ww, Wh*Ww, 2 + relative_coords[:, :, 0] += self.window_size[0] - 1 # shift to start from 0 + relative_coords[:, :, 1] += self.window_size[1] - 1 + relative_coords[:, :, 0] *= 2 * self.window_size[1] - 1 + relative_position_index = relative_coords.sum(-1) # Wh*Ww, Wh*Ww + self.register_buffer("relative_position_index", relative_position_index) + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + + self.proj_drop = nn.Dropout(proj_drop) + + trunc_normal_(self.relative_position_bias_table, std=0.02) + self.softmax = nn.Softmax(dim=-1) + + def forward(self, x, mask=None): + """ + Args: + x: input features with shape of (num_windows*B, N, C) + mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None + """ + B_, N, C = x.shape + qkv = ( + self.qkv(x) + .reshape(B_, N, 3, self.num_heads, C // self.num_heads) + .permute(2, 0, 3, 1, 4) + ) + q, k, v = ( + qkv[0], + qkv[1], + qkv[2], + ) # make torchscript happy (cannot use tensor as tuple) + + q = q * self.scale + attn = q @ k.transpose(-2, -1) + + relative_position_bias = self.relative_position_bias_table[ + self.relative_position_index.view(-1) # type: ignore + ].view( + self.window_size[0] * self.window_size[1], + self.window_size[0] * self.window_size[1], + -1, + ) # Wh*Ww,Wh*Ww,nH + relative_position_bias = relative_position_bias.permute( + 2, 0, 1 + ).contiguous() # nH, Wh*Ww, Wh*Ww + attn = attn + relative_position_bias.unsqueeze(0) + + if mask is not None: + nW = mask.shape[0] + attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze( + 1 + ).unsqueeze(0) + attn = attn.view(-1, self.num_heads, N, N) + attn = self.softmax(attn) + else: + attn = self.softmax(attn) + + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B_, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + def extra_repr(self) -> str: + return f"dim={self.dim}, window_size={self.window_size}, num_heads={self.num_heads}" + + def flops(self, N): + # calculate flops for 1 window with token length of N + flops = 0 + # qkv = self.qkv(x) + flops += N * self.dim * 3 * self.dim + # attn = (q @ k.transpose(-2, -1)) + flops += self.num_heads * N * (self.dim // self.num_heads) * N + # x = (attn @ v) + flops += self.num_heads * N * N * (self.dim // self.num_heads) + # x = self.proj(x) + flops += N * self.dim * self.dim + return flops + + +class SwinTransformerBlock(nn.Module): + r"""Swin Transformer Block. 
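+    (SwinIR/SwinV1 variant: pre-norm residuals with a learned relative
+    position bias table; compare the post-norm SwinV2 block in Swin2SR.py.)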
+
+    Args:
+        dim (int): Number of input channels.
+        input_resolution (tuple[int]): Input resolution.
+        num_heads (int): Number of attention heads.
+        window_size (int): Window size.
+        shift_size (int): Shift size for SW-MSA.
+        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim.
+        qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
+        qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set.
+        drop (float, optional): Dropout rate. Default: 0.0
+        attn_drop (float, optional): Attention dropout rate. Default: 0.0
+        drop_path (float, optional): Stochastic depth rate. Default: 0.0
+        act_layer (nn.Module, optional): Activation layer. Default: nn.GELU
+        norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
+    """
+
+    def __init__(
+        self,
+        dim,
+        input_resolution,
+        num_heads,
+        window_size=7,
+        shift_size=0,
+        mlp_ratio=4.0,
+        qkv_bias=True,
+        qk_scale=None,
+        drop=0.0,
+        attn_drop=0.0,
+        drop_path=0.0,
+        act_layer=nn.GELU,
+        norm_layer=nn.LayerNorm,
+    ):
+        super().__init__()
+        self.dim = dim
+        self.input_resolution = input_resolution
+        self.num_heads = num_heads
+        self.window_size = window_size
+        self.shift_size = shift_size
+        self.mlp_ratio = mlp_ratio
+        if min(self.input_resolution) <= self.window_size:
+            # if window size is larger than input resolution, we don't partition windows
+            self.shift_size = 0
+            self.window_size = min(self.input_resolution)
+        assert (
+            0 <= self.shift_size < self.window_size
+        ), "shift_size must be in the range [0, window_size)"
+
+        self.norm1 = norm_layer(dim)
+        self.attn = WindowAttention(
+            dim,
+            window_size=to_2tuple(self.window_size),
+            num_heads=num_heads,
+            qkv_bias=qkv_bias,
+            qk_scale=qk_scale,
+            attn_drop=attn_drop,
+            proj_drop=drop,
+        )
+
+        self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()
+        self.norm2 = norm_layer(dim)
+        mlp_hidden_dim = int(dim * mlp_ratio)
+        self.mlp = Mlp(
+            in_features=dim,
+            hidden_features=mlp_hidden_dim,
+            act_layer=act_layer,
+            drop=drop,
+        )
+
+        if self.shift_size > 0:
+            attn_mask = self.calculate_mask(self.input_resolution)
+        else:
+            attn_mask = None
+
+        self.register_buffer("attn_mask", attn_mask)
+
+    def calculate_mask(self, x_size):
+        # calculate attention mask for SW-MSA
+        H, W = x_size
+        img_mask = torch.zeros((1, H, W, 1))  # 1 H W 1
+        h_slices = (
+            slice(0, -self.window_size),
+            slice(-self.window_size, -self.shift_size),
+            slice(-self.shift_size, None),
+        )
+        w_slices = (
+            slice(0, -self.window_size),
+            slice(-self.window_size, -self.shift_size),
+            slice(-self.shift_size, None),
+        )
+        cnt = 0
+        for h in h_slices:
+            for w in w_slices:
+                img_mask[:, h, w, :] = cnt
+                cnt += 1
+
+        mask_windows = window_partition(
+            img_mask, self.window_size
+        )  # nW, window_size, window_size, 1
+        mask_windows = mask_windows.view(-1, self.window_size * self.window_size)
+        attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2)
+        attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill(
+            attn_mask == 0, float(0.0)
+        )
+
+        return attn_mask
+
+    def forward(self, x, x_size):
+        H, W = x_size
+        B, L, C = x.shape
+        # assert L == H * W, "input feature has wrong size"
+
+        shortcut = x
+        x = self.norm1(x)
+        x = x.view(B, H, W, C)
+
+        # cyclic shift
+        if self.shift_size > 0:
+            shifted_x = torch.roll(
+                x, shifts=(-self.shift_size, -self.shift_size), dims=(1, 2)
+            )
+        else:
+            shifted_x = x
+
+        # partition windows
+        x_windows = window_partition(
+            shifted_x, self.window_size
+        )  # nW*B, window_size, window_size, C
+        x_windows = x_windows.view(
+            -1, self.window_size * self.window_size, C
+        )  # nW*B, window_size*window_size, C
+
+        # W-MSA/SW-MSA (compatible with testing on images whose spatial sizes are multiples of the window size)
+        if self.input_resolution == x_size:
+            attn_windows = self.attn(
+                x_windows, mask=self.attn_mask
+            )  # nW*B, window_size*window_size, C
+        else:
+            attn_windows = self.attn(
+                x_windows, mask=self.calculate_mask(x_size).to(x.device)
+            )
+
+        # merge windows
+        attn_windows = attn_windows.view(-1, self.window_size, self.window_size, C)
+        shifted_x = window_reverse(attn_windows, self.window_size, H, W)  # B H' W' C
+
+        # reverse cyclic shift
+        if self.shift_size > 0:
+            x = torch.roll(
+                shifted_x, shifts=(self.shift_size, self.shift_size), dims=(1, 2)
+            )
+        else:
+            x = shifted_x
+        x = x.view(B, H * W, C)
+
+        # FFN
+        x = shortcut + self.drop_path(x)
+        x = x + self.drop_path(self.mlp(self.norm2(x)))
+
+        return x
+
+    def extra_repr(self) -> str:
+        return (
+            f"dim={self.dim}, input_resolution={self.input_resolution}, num_heads={self.num_heads}, "
+            f"window_size={self.window_size}, shift_size={self.shift_size}, mlp_ratio={self.mlp_ratio}"
+        )
+
+    def flops(self):
+        flops = 0
+        H, W = self.input_resolution
+        # norm1
+        flops += self.dim * H * W
+        # W-MSA/SW-MSA
+        nW = H * W / self.window_size / self.window_size
+        flops += nW * self.attn.flops(self.window_size * self.window_size)
+        # mlp
+        flops += 2 * H * W * self.dim * self.dim * self.mlp_ratio
+        # norm2
+        flops += self.dim * H * W
+        return flops
+
+
+class PatchMerging(nn.Module):
+    r"""Patch Merging Layer.
+
+    Args:
+        input_resolution (tuple[int]): Resolution of input feature.
+        dim (int): Number of input channels.
+        norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
+    """
+
+    def __init__(self, input_resolution, dim, norm_layer=nn.LayerNorm):
+        super().__init__()
+        self.input_resolution = input_resolution
+        self.dim = dim
+        self.reduction = nn.Linear(4 * dim, 2 * dim, bias=False)
+        self.norm = norm_layer(4 * dim)
+
+    def forward(self, x):
+        """
+        x: B, H*W, C
+        """
+        H, W = self.input_resolution
+        B, L, C = x.shape
+        assert L == H * W, "input feature has wrong size"
+        assert H % 2 == 0 and W % 2 == 0, f"x size ({H}*{W}) is not even."
+
+        x = x.view(B, H, W, C)
+
+        x0 = x[:, 0::2, 0::2, :]  # B H/2 W/2 C
+        x1 = x[:, 1::2, 0::2, :]  # B H/2 W/2 C
+        x2 = x[:, 0::2, 1::2, :]  # B H/2 W/2 C
+        x3 = x[:, 1::2, 1::2, :]  # B H/2 W/2 C
+        x = torch.cat([x0, x1, x2, x3], -1)  # B H/2 W/2 4*C
+        x = x.view(B, -1, 4 * C)  # B H/2*W/2 4*C
+
+        x = self.norm(x)
+        x = self.reduction(x)
+
+        return x
+
+    def extra_repr(self) -> str:
+        return f"input_resolution={self.input_resolution}, dim={self.dim}"
+
+    def flops(self):
+        H, W = self.input_resolution
+        flops = H * W * self.dim
+        flops += (H // 2) * (W // 2) * 4 * self.dim * 2 * self.dim
+        return flops
+
+
+class BasicLayer(nn.Module):
+    """A basic Swin Transformer layer for one stage.
+
+    Args:
+        dim (int): Number of input channels.
+        input_resolution (tuple[int]): Input resolution.
+        depth (int): Number of blocks.
+        num_heads (int): Number of attention heads.
+        window_size (int): Local window size.
+        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim.
+        qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
+        qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set.
+        drop (float, optional): Dropout rate. Default: 0.0
+        attn_drop (float, optional): Attention dropout rate.
Default: 0.0 + drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0 + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None + use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False. + """ + + def __init__( + self, + dim, + input_resolution, + depth, + num_heads, + window_size, + mlp_ratio=4.0, + qkv_bias=True, + qk_scale=None, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + norm_layer=nn.LayerNorm, + downsample=None, + use_checkpoint=False, + ): + super().__init__() + self.dim = dim + self.input_resolution = input_resolution + self.depth = depth + self.use_checkpoint = use_checkpoint + + # build blocks + self.blocks = nn.ModuleList( + [ + SwinTransformerBlock( + dim=dim, + input_resolution=input_resolution, + num_heads=num_heads, + window_size=window_size, + shift_size=0 if (i % 2 == 0) else window_size // 2, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop, + attn_drop=attn_drop, + drop_path=drop_path[i] + if isinstance(drop_path, list) + else drop_path, + norm_layer=norm_layer, + ) + for i in range(depth) + ] + ) + + # patch merging layer + if downsample is not None: + self.downsample = downsample( + input_resolution, dim=dim, norm_layer=norm_layer + ) + else: + self.downsample = None + + def forward(self, x, x_size): + for blk in self.blocks: + if self.use_checkpoint: + x = checkpoint.checkpoint(blk, x, x_size) + else: + x = blk(x, x_size) + if self.downsample is not None: + x = self.downsample(x) + return x + + def extra_repr(self) -> str: + return f"dim={self.dim}, input_resolution={self.input_resolution}, depth={self.depth}" + + def flops(self): + flops = 0 + for blk in self.blocks: + flops += blk.flops() # type: ignore + if self.downsample is not None: + flops += self.downsample.flops() + return flops + + +class RSTB(nn.Module): + """Residual Swin Transformer Block (RSTB). + + Args: + dim (int): Number of input channels. + input_resolution (tuple[int]): Input resolution. + depth (int): Number of blocks. + num_heads (int): Number of attention heads. + window_size (int): Local window size. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set. + drop (float, optional): Dropout rate. Default: 0.0 + attn_drop (float, optional): Attention dropout rate. Default: 0.0 + drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0 + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None + use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False. + img_size: Input image size. + patch_size: Patch size. + resi_connection: The convolutional block before residual connection. 
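+            '1conv'/'3conv', as handled in __init__ below.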
+ """ + + def __init__( + self, + dim, + input_resolution, + depth, + num_heads, + window_size, + mlp_ratio=4.0, + qkv_bias=True, + qk_scale=None, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + norm_layer=nn.LayerNorm, + downsample=None, + use_checkpoint=False, + img_size=224, + patch_size=4, + resi_connection="1conv", + ): + super(RSTB, self).__init__() + + self.dim = dim + self.input_resolution = input_resolution + + self.residual_group = BasicLayer( + dim=dim, + input_resolution=input_resolution, + depth=depth, + num_heads=num_heads, + window_size=window_size, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop, + attn_drop=attn_drop, + drop_path=drop_path, + norm_layer=norm_layer, + downsample=downsample, + use_checkpoint=use_checkpoint, + ) + + if resi_connection == "1conv": + self.conv = nn.Conv2d(dim, dim, 3, 1, 1) + elif resi_connection == "3conv": + # to save parameters and memory + self.conv = nn.Sequential( + nn.Conv2d(dim, dim // 4, 3, 1, 1), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv2d(dim // 4, dim // 4, 1, 1, 0), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv2d(dim // 4, dim, 3, 1, 1), + ) + + self.patch_embed = PatchEmbed( + img_size=img_size, + patch_size=patch_size, + in_chans=0, + embed_dim=dim, + norm_layer=None, + ) + + self.patch_unembed = PatchUnEmbed( + img_size=img_size, + patch_size=patch_size, + in_chans=0, + embed_dim=dim, + norm_layer=None, + ) + + def forward(self, x, x_size): + return ( + self.patch_embed( + self.conv(self.patch_unembed(self.residual_group(x, x_size), x_size)) + ) + + x + ) + + def flops(self): + flops = 0 + flops += self.residual_group.flops() + H, W = self.input_resolution + flops += H * W * self.dim * self.dim * 9 + flops += self.patch_embed.flops() + flops += self.patch_unembed.flops() + + return flops + + +class PatchEmbed(nn.Module): + r"""Image to Patch Embedding + + Args: + img_size (int): Image size. Default: 224. + patch_size (int): Patch token size. Default: 4. + in_chans (int): Number of input image channels. Default: 3. + embed_dim (int): Number of linear projection output channels. Default: 96. + norm_layer (nn.Module, optional): Normalization layer. Default: None + """ + + def __init__( + self, img_size=224, patch_size=4, in_chans=3, embed_dim=96, norm_layer=None + ): + super().__init__() + img_size = to_2tuple(img_size) + patch_size = to_2tuple(patch_size) + patches_resolution = [ + img_size[0] // patch_size[0], # type: ignore + img_size[1] // patch_size[1], # type: ignore + ] + self.img_size = img_size + self.patch_size = patch_size + self.patches_resolution = patches_resolution + self.num_patches = patches_resolution[0] * patches_resolution[1] + + self.in_chans = in_chans + self.embed_dim = embed_dim + + if norm_layer is not None: + self.norm = norm_layer(embed_dim) + else: + self.norm = None + + def forward(self, x): + x = x.flatten(2).transpose(1, 2) # B Ph*Pw C + if self.norm is not None: + x = self.norm(x) + return x + + def flops(self): + flops = 0 + H, W = self.img_size + if self.norm is not None: + flops += H * W * self.embed_dim # type: ignore + return flops + + +class PatchUnEmbed(nn.Module): + r"""Image to Patch Unembedding + + Args: + img_size (int): Image size. Default: 224. + patch_size (int): Patch token size. Default: 4. + in_chans (int): Number of input image channels. Default: 3. + embed_dim (int): Number of linear projection output channels. Default: 96. + norm_layer (nn.Module, optional): Normalization layer. 
Default: None
+    """
+
+    def __init__(
+        self, img_size=224, patch_size=4, in_chans=3, embed_dim=96, norm_layer=None
+    ):
+        super().__init__()
+        img_size = to_2tuple(img_size)
+        patch_size = to_2tuple(patch_size)
+        patches_resolution = [
+            img_size[0] // patch_size[0],  # type: ignore
+            img_size[1] // patch_size[1],  # type: ignore
+        ]
+        self.img_size = img_size
+        self.patch_size = patch_size
+        self.patches_resolution = patches_resolution
+        self.num_patches = patches_resolution[0] * patches_resolution[1]
+
+        self.in_chans = in_chans
+        self.embed_dim = embed_dim
+
+    def forward(self, x, x_size):
+        B, HW, C = x.shape
+        x = x.transpose(1, 2).view(B, self.embed_dim, x_size[0], x_size[1])  # B, C, H, W
+        return x
+
+    def flops(self):
+        flops = 0
+        return flops
+
+
+class Upsample(nn.Sequential):
+    """Upsample module.
+
+    Args:
+        scale (int): Scale factor. Supported scales: 2^n and 3.
+        num_feat (int): Channel number of intermediate features.
+    """
+
+    def __init__(self, scale, num_feat):
+        m = []
+        if (scale & (scale - 1)) == 0:  # scale = 2^n
+            for _ in range(int(math.log(scale, 2))):
+                m.append(nn.Conv2d(num_feat, 4 * num_feat, 3, 1, 1))
+                m.append(nn.PixelShuffle(2))
+        elif scale == 3:
+            m.append(nn.Conv2d(num_feat, 9 * num_feat, 3, 1, 1))
+            m.append(nn.PixelShuffle(3))
+        else:
+            raise ValueError(
+                f"scale {scale} is not supported. Supported scales: 2^n and 3."
+            )
+        super(Upsample, self).__init__(*m)
+
+
+class UpsampleOneStep(nn.Sequential):
+    """UpsampleOneStep module (the difference with Upsample is that it always only has 1conv + 1pixelshuffle)
+    Used in lightweight SR to save parameters.
+
+    Args:
+        scale (int): Scale factor. Supported scales: 2^n and 3.
+        num_feat (int): Channel number of intermediate features.
+
+    """
+
+    def __init__(self, scale, num_feat, num_out_ch, input_resolution=None):
+        self.num_feat = num_feat
+        self.input_resolution = input_resolution
+        m = []
+        m.append(nn.Conv2d(num_feat, (scale**2) * num_out_ch, 3, 1, 1))
+        m.append(nn.PixelShuffle(scale))
+        super(UpsampleOneStep, self).__init__(*m)
+
+    def flops(self):
+        H, W = self.input_resolution  # type: ignore
+        flops = H * W * self.num_feat * 3 * 9
+        return flops
+
+
+class SwinIR(nn.Module):
+    r"""SwinIR
+    A PyTorch impl of: `SwinIR: Image Restoration Using Swin Transformer`, based on Swin Transformer.
+
+    Args:
+        img_size (int | tuple(int)): Input image size. Default 64
+        patch_size (int | tuple(int)): Patch size. Default: 1
+        in_chans (int): Number of input image channels. Default: 3
+        embed_dim (int): Patch embedding dimension. Default: 96
+        depths (tuple(int)): Depth of each Swin Transformer layer.
+        num_heads (tuple(int)): Number of attention heads in different layers.
+        window_size (int): Window size. Default: 7
+        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4
+        qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True
+        qk_scale (float): Override default qk scale of head_dim ** -0.5 if set. Default: None
+        drop_rate (float): Dropout rate. Default: 0
+        attn_drop_rate (float): Attention dropout rate. Default: 0
+        drop_path_rate (float): Stochastic depth rate. Default: 0.1
+        norm_layer (nn.Module): Normalization layer. Default: nn.LayerNorm.
+        ape (bool): If True, add absolute position embedding to the patch embedding. Default: False
+        patch_norm (bool): If True, add normalization after patch embedding. Default: True
+        use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False
+        upscale: Upscale factor. 2/3/4/8 for image SR, 1 for denoising and compression artifact reduction
+        img_range: Image range. 1. or 255.
+        upsampler: The reconstruction module. 'pixelshuffle'/'pixelshuffledirect'/'nearest+conv'/None
+        resi_connection: The convolutional block before residual connection. '1conv'/'3conv'
+    """
+
+    def __init__(
+        self,
+        state_dict,
+        **kwargs,
+    ):
+        super(SwinIR, self).__init__()
+
+        # Defaults
+        img_size = 64
+        patch_size = 1
+        in_chans = 3
+        embed_dim = 96
+        depths = [6, 6, 6, 6]
+        num_heads = [6, 6, 6, 6]
+        window_size = 7
+        mlp_ratio = 4.0
+        qkv_bias = True
+        qk_scale = None
+        drop_rate = 0.0
+        attn_drop_rate = 0.0
+        drop_path_rate = 0.1
+        norm_layer = nn.LayerNorm
+        ape = False
+        patch_norm = True
+        use_checkpoint = False
+        upscale = 2
+        img_range = 1.0
+        upsampler = ""
+        resi_connection = "1conv"
+        num_feat = 64
+        num_in_ch = in_chans
+        num_out_ch = in_chans
+        supports_fp16 = True
+        self.start_unshuffle = 1
+
+        self.model_arch = "SwinIR"
+        self.sub_type = "SR"
+        self.state = state_dict
+        if "params_ema" in self.state:
+            self.state = self.state["params_ema"]
+        elif "params" in self.state:
+            self.state = self.state["params"]
+
+        state_keys = self.state.keys()
+
+        if "conv_before_upsample.0.weight" in state_keys:
+            if "conv_up1.weight" in state_keys:
+                upsampler = "nearest+conv"
+            else:
+                upsampler = "pixelshuffle"
+                supports_fp16 = False
+        elif "upsample.0.weight" in state_keys:
+            upsampler = "pixelshuffledirect"
+        else:
+            upsampler = ""
+
+        # conv_before_upsample.0 is Conv2d(embed_dim, num_feat, 3, 1, 1), so its
+        # weight has shape (num_feat, embed_dim, 3, 3); read num_feat from dim 0
+        num_feat = (
+            self.state["conv_before_upsample.0.weight"].shape[0]
+            if "conv_before_upsample.0.weight" in state_keys
+            else 64
+        )
+
+        if "conv_first.1.weight" in self.state:
+            self.state["conv_first.weight"] = self.state.pop("conv_first.1.weight")
+            self.state["conv_first.bias"] = self.state.pop("conv_first.1.bias")
+            self.start_unshuffle = round(
+                math.sqrt(self.state["conv_first.weight"].shape[1] // 3)
+            )
+
+        num_in_ch = self.state["conv_first.weight"].shape[1]
+        in_chans = num_in_ch
+        if "conv_last.weight" in state_keys:
+            num_out_ch = self.state["conv_last.weight"].shape[0]
+        else:
+            num_out_ch = num_in_ch
+
+        upscale = 1
+        if upsampler == "nearest+conv":
+            upsample_keys = [
+                x for x in state_keys if "conv_up" in x and "bias" not in x
+            ]
+
+            for _ in upsample_keys:
+                upscale *= 2
+        elif upsampler == "pixelshuffle":
+            upsample_keys = [
+                x
+                for x in state_keys
+                if "upsample" in x and "conv" not in x and "bias" not in x
+            ]
+            for upsample_key in upsample_keys:
+                shape = self.state[upsample_key].shape[0]
+                upscale *= math.sqrt(shape // num_feat)
+            upscale = int(upscale)
+        elif upsampler == "pixelshuffledirect":
+            upscale = int(
+                math.sqrt(self.state["upsample.0.bias"].shape[0] // num_out_ch)
+            )
+
+        max_layer_num = 0
+        max_block_num = 0
+        for key in state_keys:
+            result = re.match(
+                r"layers.(\d*).residual_group.blocks.(\d*).norm1.weight", key
+            )
+            if result:
+                layer_num, block_num = result.groups()
+                max_layer_num = max(max_layer_num, int(layer_num))
+                max_block_num = max(max_block_num, int(block_num))
+
+        depths = [max_block_num + 1 for _ in range(max_layer_num + 1)]
+
+        if (
+            "layers.0.residual_group.blocks.0.attn.relative_position_bias_table"
+            in state_keys
+        ):
+            num_heads_num = self.state[
+                "layers.0.residual_group.blocks.0.attn.relative_position_bias_table"
+            ].shape[-1]
+            num_heads = [num_heads_num for _ in range(max_layer_num + 1)]
+        else:
+            num_heads = depths
+
+        embed_dim = self.state["conv_first.weight"].shape[0]
+
+        mlp_ratio = float(
+
self.state["layers.0.residual_group.blocks.0.mlp.fc1.bias"].shape[0] + / embed_dim + ) + + # TODO: could actually count the layers, but this should do + if "layers.0.conv.4.weight" in state_keys: + resi_connection = "3conv" + else: + resi_connection = "1conv" + + window_size = int( + math.sqrt( + self.state[ + "layers.0.residual_group.blocks.0.attn.relative_position_index" + ].shape[0] + ) + ) + + if "layers.0.residual_group.blocks.1.attn_mask" in state_keys: + img_size = int( + math.sqrt( + self.state["layers.0.residual_group.blocks.1.attn_mask"].shape[0] + ) + * window_size + ) + + # The JPEG models are the only ones with window-size 7, and they also use this range + img_range = 255.0 if window_size == 7 else 1.0 + + self.in_nc = num_in_ch + self.out_nc = num_out_ch + self.num_feat = num_feat + self.embed_dim = embed_dim + self.num_heads = num_heads + self.depths = depths + self.window_size = window_size + self.mlp_ratio = mlp_ratio + self.scale = upscale / self.start_unshuffle + self.upsampler = upsampler + self.img_size = img_size + self.img_range = img_range + self.resi_connection = resi_connection + + self.supports_fp16 = False # Too much weirdness to support this at the moment + self.supports_bfp16 = True + self.min_size_restriction = 16 + + self.img_range = img_range + if in_chans == 3: + rgb_mean = (0.4488, 0.4371, 0.4040) + self.mean = torch.Tensor(rgb_mean).view(1, 3, 1, 1) + else: + self.mean = torch.zeros(1, 1, 1, 1) + self.upscale = upscale + self.upsampler = upsampler + self.window_size = window_size + + ##################################################################################################### + ################################### 1, shallow feature extraction ################################### + self.conv_first = nn.Conv2d(num_in_ch, embed_dim, 3, 1, 1) + + ##################################################################################################### + ################################### 2, deep feature extraction ###################################### + self.num_layers = len(depths) + self.embed_dim = embed_dim + self.ape = ape + self.patch_norm = patch_norm + self.num_features = embed_dim + self.mlp_ratio = mlp_ratio + + # split image into non-overlapping patches + self.patch_embed = PatchEmbed( + img_size=img_size, + patch_size=patch_size, + in_chans=embed_dim, + embed_dim=embed_dim, + norm_layer=norm_layer if self.patch_norm else None, + ) + num_patches = self.patch_embed.num_patches + patches_resolution = self.patch_embed.patches_resolution + self.patches_resolution = patches_resolution + + # merge non-overlapping patches into image + self.patch_unembed = PatchUnEmbed( + img_size=img_size, + patch_size=patch_size, + in_chans=embed_dim, + embed_dim=embed_dim, + norm_layer=norm_layer if self.patch_norm else None, + ) + + # absolute position embedding + if self.ape: + self.absolute_pos_embed = nn.Parameter( # type: ignore + torch.zeros(1, num_patches, embed_dim) + ) + trunc_normal_(self.absolute_pos_embed, std=0.02) + + self.pos_drop = nn.Dropout(p=drop_rate) + + # stochastic depth + dpr = [ + x.item() for x in torch.linspace(0, drop_path_rate, sum(depths)) + ] # stochastic depth decay rule + + # build Residual Swin Transformer blocks (RSTB) + self.layers = nn.ModuleList() + for i_layer in range(self.num_layers): + layer = RSTB( + dim=embed_dim, + input_resolution=(patches_resolution[0], patches_resolution[1]), + depth=depths[i_layer], + num_heads=num_heads[i_layer], + window_size=window_size, + mlp_ratio=self.mlp_ratio, + qkv_bias=qkv_bias, + 
qk_scale=qk_scale, + drop=drop_rate, + attn_drop=attn_drop_rate, + drop_path=dpr[ + sum(depths[:i_layer]) : sum(depths[: i_layer + 1]) # type: ignore + ], # no impact on SR results + norm_layer=norm_layer, + downsample=None, + use_checkpoint=use_checkpoint, + img_size=img_size, + patch_size=patch_size, + resi_connection=resi_connection, + ) + self.layers.append(layer) + self.norm = norm_layer(self.num_features) + + # build the last conv layer in deep feature extraction + if resi_connection == "1conv": + self.conv_after_body = nn.Conv2d(embed_dim, embed_dim, 3, 1, 1) + elif resi_connection == "3conv": + # to save parameters and memory + self.conv_after_body = nn.Sequential( + nn.Conv2d(embed_dim, embed_dim // 4, 3, 1, 1), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv2d(embed_dim // 4, embed_dim // 4, 1, 1, 0), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv2d(embed_dim // 4, embed_dim, 3, 1, 1), + ) + + ##################################################################################################### + ################################ 3, high quality image reconstruction ################################ + if self.upsampler == "pixelshuffle": + # for classical SR + self.conv_before_upsample = nn.Sequential( + nn.Conv2d(embed_dim, num_feat, 3, 1, 1), nn.LeakyReLU(inplace=True) + ) + self.upsample = Upsample(upscale, num_feat) + self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1) + elif self.upsampler == "pixelshuffledirect": + # for lightweight SR (to save parameters) + self.upsample = UpsampleOneStep( + upscale, + embed_dim, + num_out_ch, + (patches_resolution[0], patches_resolution[1]), + ) + elif self.upsampler == "nearest+conv": + # for real-world SR (less artifacts) + self.conv_before_upsample = nn.Sequential( + nn.Conv2d(embed_dim, num_feat, 3, 1, 1), nn.LeakyReLU(inplace=True) + ) + self.conv_up1 = nn.Conv2d(num_feat, num_feat, 3, 1, 1) + if self.upscale == 4: + self.conv_up2 = nn.Conv2d(num_feat, num_feat, 3, 1, 1) + elif self.upscale == 8: + self.conv_up2 = nn.Conv2d(num_feat, num_feat, 3, 1, 1) + self.conv_up3 = nn.Conv2d(num_feat, num_feat, 3, 1, 1) + self.conv_hr = nn.Conv2d(num_feat, num_feat, 3, 1, 1) + self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1) + self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True) + else: + # for image denoising and JPEG compression artifact reduction + self.conv_last = nn.Conv2d(embed_dim, num_out_ch, 3, 1, 1) + + self.apply(self._init_weights) + self.load_state_dict(self.state, strict=False) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore # type: ignore + def no_weight_decay(self): + return {"absolute_pos_embed"} + + @torch.jit.ignore # type: ignore + def no_weight_decay_keywords(self): + return {"relative_position_bias_table"} + + def check_image_size(self, x): + _, _, h, w = x.size() + mod_pad_h = (self.window_size - h % self.window_size) % self.window_size + mod_pad_w = (self.window_size - w % self.window_size) % self.window_size + x = F.pad(x, (0, mod_pad_w, 0, mod_pad_h), "reflect") + return x + + def forward_features(self, x): + x_size = (x.shape[2], x.shape[3]) + x = self.patch_embed(x) + if self.ape: + x = x + self.absolute_pos_embed + x = self.pos_drop(x) + + for layer in self.layers: + x = layer(x, x_size) + + x = self.norm(x) # B L C + 
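+        # fold the (B, L, C) token sequence back into a (B, C, H, W) feature map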
+        x = self.patch_unembed(x, x_size)
+
+        return x
+
+    def forward(self, x):
+        H, W = x.shape[2:]
+        x = self.check_image_size(x)
+
+        self.mean = self.mean.type_as(x)
+        x = (x - self.mean) * self.img_range
+
+        if self.start_unshuffle > 1:
+            x = torch.nn.functional.pixel_unshuffle(x, self.start_unshuffle)
+
+        if self.upsampler == "pixelshuffle":
+            # for classical SR
+            x = self.conv_first(x)
+            x = self.conv_after_body(self.forward_features(x)) + x
+            x = self.conv_before_upsample(x)
+            x = self.conv_last(self.upsample(x))
+        elif self.upsampler == "pixelshuffledirect":
+            # for lightweight SR
+            x = self.conv_first(x)
+            x = self.conv_after_body(self.forward_features(x)) + x
+            x = self.upsample(x)
+        elif self.upsampler == "nearest+conv":
+            # for real-world SR
+            x = self.conv_first(x)
+            x = self.conv_after_body(self.forward_features(x)) + x
+            x = self.conv_before_upsample(x)
+            x = self.lrelu(
+                self.conv_up1(
+                    torch.nn.functional.interpolate(x, scale_factor=2, mode="nearest")  # type: ignore
+                )
+            )
+            if self.upscale == 4:
+                x = self.lrelu(
+                    self.conv_up2(
+                        torch.nn.functional.interpolate(  # type: ignore
+                            x, scale_factor=2, mode="nearest"
+                        )
+                    )
+                )
+            elif self.upscale == 8:
+                x = self.lrelu(
+                    self.conv_up2(
+                        torch.nn.functional.interpolate(x, scale_factor=2, mode="nearest")
+                    )
+                )
+                x = self.lrelu(
+                    self.conv_up3(
+                        torch.nn.functional.interpolate(x, scale_factor=2, mode="nearest")
+                    )
+                )
+            x = self.conv_last(self.lrelu(self.conv_hr(x)))
+        else:
+            # for image denoising and JPEG compression artifact reduction
+            x_first = self.conv_first(x)
+            res = self.conv_after_body(self.forward_features(x_first)) + x_first
+            x = x + self.conv_last(res)
+
+        x = x / self.img_range + self.mean
+
+        return x[:, :, : H * self.upscale, : W * self.upscale]
+
+    def flops(self):
+        flops = 0
+        H, W = self.patches_resolution
+        flops += H * W * 3 * self.embed_dim * 9
+        flops += self.patch_embed.flops()
+        for i, layer in enumerate(self.layers):
+            flops += layer.flops()  # type: ignore
+        flops += H * W * 3 * self.embed_dim * self.embed_dim
+        flops += self.upsample.flops()  # type: ignore
+        return flops
diff --git a/ldm_patched/pfn/architecture/__init__.py b/ldm_patched/pfn/architecture/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/ldm_patched/pfn/architecture/block.py b/ldm_patched/pfn/architecture/block.py
new file mode 100644
index 0000000000000000000000000000000000000000..d7bc5d227008a73c40f9087da1ee3ae2ca25a896
--- /dev/null
+++ b/ldm_patched/pfn/architecture/block.py
@@ -0,0 +1,546 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+from __future__ import annotations
+
+from collections import OrderedDict
+try:
+    from typing import Literal
+except ImportError:
+    from typing_extensions import Literal
+
+import torch
+import torch.nn as nn
+
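+# A minimal usage sketch of the helpers defined below (illustrative only):
+#   conv_block(3, 64, kernel_size=3, norm_type="batch", act_type="relu")
+# builds the default "CNA" (Conv -> Norm -> Act) composition, i.e.
+# nn.Sequential(Conv2d(3, 64, 3, padding=1), BatchNorm2d(64), ReLU()), while
+#   pixelshuffle_block(64, 3, upscale_factor=2)
+# builds a x2 sub-pixel upsampler:
+# Conv2d(64, 12, 3, padding=1) -> PixelShuffle(2) -> ReLU() (the default act).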
+#################### +# Basic blocks +#################### + + +def act(act_type: str, inplace=True, neg_slope=0.2, n_prelu=1): + # helper selecting activation + # neg_slope: for leakyrelu and init of prelu + # n_prelu: for p_relu num_parameters + act_type = act_type.lower() + if act_type == "relu": + layer = nn.ReLU(inplace) + elif act_type == "leakyrelu": + layer = nn.LeakyReLU(neg_slope, inplace) + elif act_type == "prelu": + layer = nn.PReLU(num_parameters=n_prelu, init=neg_slope) + else: + raise NotImplementedError( + "activation layer [{:s}] is not found".format(act_type) + ) + return layer + + +def norm(norm_type: str, nc: int): + # helper selecting normalization layer + norm_type = norm_type.lower() + if norm_type == "batch": + layer = nn.BatchNorm2d(nc, affine=True) + elif norm_type == "instance": + layer = nn.InstanceNorm2d(nc, affine=False) + else: + raise NotImplementedError( + "normalization layer [{:s}] is not found".format(norm_type) + ) + return layer + + +def pad(pad_type: str, padding): + # helper selecting padding layer + # if padding is 'zero', do by conv layers + pad_type = pad_type.lower() + if padding == 0: + return None + if pad_type == "reflect": + layer = nn.ReflectionPad2d(padding) + elif pad_type == "replicate": + layer = nn.ReplicationPad2d(padding) + else: + raise NotImplementedError( + "padding layer [{:s}] is not implemented".format(pad_type) + ) + return layer + + +def get_valid_padding(kernel_size, dilation): + kernel_size = kernel_size + (kernel_size - 1) * (dilation - 1) + padding = (kernel_size - 1) // 2 + return padding + + +class ConcatBlock(nn.Module): + # Concat the output of a submodule to its input + def __init__(self, submodule): + super(ConcatBlock, self).__init__() + self.sub = submodule + + def forward(self, x): + output = torch.cat((x, self.sub(x)), dim=1) + return output + + def __repr__(self): + tmpstr = "Identity .. \n|" + modstr = self.sub.__repr__().replace("\n", "\n|") + tmpstr = tmpstr + modstr + return tmpstr + + +class ShortcutBlock(nn.Module): + # Elementwise sum the output of a submodule to its input + def __init__(self, submodule): + super(ShortcutBlock, self).__init__() + self.sub = submodule + + def forward(self, x): + output = x + self.sub(x) + return output + + def __repr__(self): + tmpstr = "Identity + \n|" + modstr = self.sub.__repr__().replace("\n", "\n|") + tmpstr = tmpstr + modstr + return tmpstr + + +class ShortcutBlockSPSR(nn.Module): + # Elementwise sum the output of a submodule to its input + def __init__(self, submodule): + super(ShortcutBlockSPSR, self).__init__() + self.sub = submodule + + def forward(self, x): + return x, self.sub + + def __repr__(self): + tmpstr = "Identity + \n|" + modstr = self.sub.__repr__().replace("\n", "\n|") + tmpstr = tmpstr + modstr + return tmpstr + + +def sequential(*args): + # Flatten Sequential. It unwraps nn.Sequential. + if len(args) == 1: + if isinstance(args[0], OrderedDict): + raise NotImplementedError("sequential does not support OrderedDict input.") + return args[0] # No sequential is needed. 
+ modules = [] + for module in args: + if isinstance(module, nn.Sequential): + for submodule in module.children(): + modules.append(submodule) + elif isinstance(module, nn.Module): + modules.append(module) + return nn.Sequential(*modules) + + +ConvMode = Literal["CNA", "NAC", "CNAC"] + + +# 2x2x2 Conv Block +def conv_block_2c2( + in_nc, + out_nc, + act_type="relu", +): + return sequential( + nn.Conv2d(in_nc, out_nc, kernel_size=2, padding=1), + nn.Conv2d(out_nc, out_nc, kernel_size=2, padding=0), + act(act_type) if act_type else None, + ) + + +def conv_block( + in_nc: int, + out_nc: int, + kernel_size, + stride=1, + dilation=1, + groups=1, + bias=True, + pad_type="zero", + norm_type: str | None = None, + act_type: str | None = "relu", + mode: ConvMode = "CNA", + c2x2=False, +): + """ + Conv layer with padding, normalization, activation + mode: CNA --> Conv -> Norm -> Act + NAC --> Norm -> Act --> Conv (Identity Mappings in Deep Residual Networks, ECCV16) + """ + + if c2x2: + return conv_block_2c2(in_nc, out_nc, act_type=act_type) + + assert mode in ("CNA", "NAC", "CNAC"), "Wrong conv mode [{:s}]".format(mode) + padding = get_valid_padding(kernel_size, dilation) + p = pad(pad_type, padding) if pad_type and pad_type != "zero" else None + padding = padding if pad_type == "zero" else 0 + + c = nn.Conv2d( + in_nc, + out_nc, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + bias=bias, + groups=groups, + ) + a = act(act_type) if act_type else None + if mode in ("CNA", "CNAC"): + n = norm(norm_type, out_nc) if norm_type else None + return sequential(p, c, n, a) + elif mode == "NAC": + if norm_type is None and act_type is not None: + a = act(act_type, inplace=False) + # Important! + # input----ReLU(inplace)----Conv--+----output + # |________________________| + # inplace ReLU will modify the input, therefore wrong output + n = norm(norm_type, in_nc) if norm_type else None + return sequential(n, a, p, c) + else: + assert False, f"Invalid conv mode {mode}" + + +#################### +# Useful blocks +#################### + + +class ResNetBlock(nn.Module): + """ + ResNet Block, 3-3 style + with extra residual scaling used in EDSR + (Enhanced Deep Residual Networks for Single Image Super-Resolution, CVPRW 17) + """ + + def __init__( + self, + in_nc, + mid_nc, + out_nc, + kernel_size=3, + stride=1, + dilation=1, + groups=1, + bias=True, + pad_type="zero", + norm_type=None, + act_type="relu", + mode: ConvMode = "CNA", + res_scale=1, + ): + super(ResNetBlock, self).__init__() + conv0 = conv_block( + in_nc, + mid_nc, + kernel_size, + stride, + dilation, + groups, + bias, + pad_type, + norm_type, + act_type, + mode, + ) + if mode == "CNA": + act_type = None + if mode == "CNAC": # Residual path: |-CNAC-| + act_type = None + norm_type = None + conv1 = conv_block( + mid_nc, + out_nc, + kernel_size, + stride, + dilation, + groups, + bias, + pad_type, + norm_type, + act_type, + mode, + ) + # if in_nc != out_nc: + # self.project = conv_block(in_nc, out_nc, 1, stride, dilation, 1, bias, pad_type, \ + # None, None) + # print('Need a projecter in ResNetBlock.') + # else: + # self.project = lambda x:x + self.res = sequential(conv0, conv1) + self.res_scale = res_scale + + def forward(self, x): + res = self.res(x).mul(self.res_scale) + return x + res + + +class RRDB(nn.Module): + """ + Residual in Residual Dense Block + (ESRGAN: Enhanced Super-Resolution Generative Adversarial Networks) + """ + + def __init__( + self, + nf, + kernel_size=3, + gc=32, + stride=1, + bias: bool = True, + 
pad_type="zero", + norm_type=None, + act_type="leakyrelu", + mode: ConvMode = "CNA", + _convtype="Conv2D", + _spectral_norm=False, + plus=False, + c2x2=False, + ): + super(RRDB, self).__init__() + self.RDB1 = ResidualDenseBlock_5C( + nf, + kernel_size, + gc, + stride, + bias, + pad_type, + norm_type, + act_type, + mode, + plus=plus, + c2x2=c2x2, + ) + self.RDB2 = ResidualDenseBlock_5C( + nf, + kernel_size, + gc, + stride, + bias, + pad_type, + norm_type, + act_type, + mode, + plus=plus, + c2x2=c2x2, + ) + self.RDB3 = ResidualDenseBlock_5C( + nf, + kernel_size, + gc, + stride, + bias, + pad_type, + norm_type, + act_type, + mode, + plus=plus, + c2x2=c2x2, + ) + + def forward(self, x): + out = self.RDB1(x) + out = self.RDB2(out) + out = self.RDB3(out) + return out * 0.2 + x + + +class ResidualDenseBlock_5C(nn.Module): + """ + Residual Dense Block + style: 5 convs + The core module of paper: (Residual Dense Network for Image Super-Resolution, CVPR 18) + Modified options that can be used: + - "Partial Convolution based Padding" arXiv:1811.11718 + - "Spectral normalization" arXiv:1802.05957 + - "ICASSP 2020 - ESRGAN+ : Further Improving ESRGAN" N. C. + {Rakotonirina} and A. {Rasoanaivo} + + Args: + nf (int): Channel number of intermediate features (num_feat). + gc (int): Channels for each growth (num_grow_ch: growth channel, + i.e. intermediate channels). + convtype (str): the type of convolution to use. Default: 'Conv2D' + gaussian_noise (bool): enable the ESRGAN+ gaussian noise (no new + trainable parameters) + plus (bool): enable the additional residual paths from ESRGAN+ + (adds trainable parameters) + """ + + def __init__( + self, + nf=64, + kernel_size=3, + gc=32, + stride=1, + bias: bool = True, + pad_type="zero", + norm_type=None, + act_type="leakyrelu", + mode: ConvMode = "CNA", + plus=False, + c2x2=False, + ): + super(ResidualDenseBlock_5C, self).__init__() + + ## + + self.conv1x1 = conv1x1(nf, gc) if plus else None + ## + + + self.conv1 = conv_block( + nf, + gc, + kernel_size, + stride, + bias=bias, + pad_type=pad_type, + norm_type=norm_type, + act_type=act_type, + mode=mode, + c2x2=c2x2, + ) + self.conv2 = conv_block( + nf + gc, + gc, + kernel_size, + stride, + bias=bias, + pad_type=pad_type, + norm_type=norm_type, + act_type=act_type, + mode=mode, + c2x2=c2x2, + ) + self.conv3 = conv_block( + nf + 2 * gc, + gc, + kernel_size, + stride, + bias=bias, + pad_type=pad_type, + norm_type=norm_type, + act_type=act_type, + mode=mode, + c2x2=c2x2, + ) + self.conv4 = conv_block( + nf + 3 * gc, + gc, + kernel_size, + stride, + bias=bias, + pad_type=pad_type, + norm_type=norm_type, + act_type=act_type, + mode=mode, + c2x2=c2x2, + ) + if mode == "CNA": + last_act = None + else: + last_act = act_type + self.conv5 = conv_block( + nf + 4 * gc, + nf, + 3, + stride, + bias=bias, + pad_type=pad_type, + norm_type=norm_type, + act_type=last_act, + mode=mode, + c2x2=c2x2, + ) + + def forward(self, x): + x1 = self.conv1(x) + x2 = self.conv2(torch.cat((x, x1), 1)) + if self.conv1x1: + # pylint: disable=not-callable + x2 = x2 + self.conv1x1(x) # + + x3 = self.conv3(torch.cat((x, x1, x2), 1)) + x4 = self.conv4(torch.cat((x, x1, x2, x3), 1)) + if self.conv1x1: + x4 = x4 + x2 # + + x5 = self.conv5(torch.cat((x, x1, x2, x3, x4), 1)) + return x5 * 0.2 + x + + +def conv1x1(in_planes, out_planes, stride=1): + return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False) + + +#################### +# Upsampler +#################### + + +def pixelshuffle_block( + in_nc: int, + out_nc: int, + 
upscale_factor=2, + kernel_size=3, + stride=1, + bias=True, + pad_type="zero", + norm_type: str | None = None, + act_type="relu", +): + """ + Pixel shuffle layer + (Real-Time Single Image and Video Super-Resolution Using an Efficient Sub-Pixel Convolutional + Neural Network, CVPR17) + """ + conv = conv_block( + in_nc, + out_nc * (upscale_factor**2), + kernel_size, + stride, + bias=bias, + pad_type=pad_type, + norm_type=None, + act_type=None, + ) + pixel_shuffle = nn.PixelShuffle(upscale_factor) + + n = norm(norm_type, out_nc) if norm_type else None + a = act(act_type) if act_type else None + return sequential(conv, pixel_shuffle, n, a) + + +def upconv_block( + in_nc: int, + out_nc: int, + upscale_factor=2, + kernel_size=3, + stride=1, + bias=True, + pad_type="zero", + norm_type: str | None = None, + act_type="relu", + mode="nearest", + c2x2=False, +): + # Up conv + # described in https://distill.pub/2016/deconv-checkerboard/ + upsample = nn.Upsample(scale_factor=upscale_factor, mode=mode) + conv = conv_block( + in_nc, + out_nc, + kernel_size, + stride, + bias=bias, + pad_type=pad_type, + norm_type=norm_type, + act_type=act_type, + c2x2=c2x2, + ) + return sequential(upsample, conv) diff --git a/ldm_patched/pfn/architecture/face/LICENSE-GFPGAN b/ldm_patched/pfn/architecture/face/LICENSE-GFPGAN new file mode 100644 index 0000000000000000000000000000000000000000..5ac273fd509e328f396e6e4444673a3b051a4968 --- /dev/null +++ b/ldm_patched/pfn/architecture/face/LICENSE-GFPGAN @@ -0,0 +1,351 @@ +Tencent is pleased to support the open source community by making GFPGAN available. + +Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved. + +GFPGAN is licensed under the Apache License Version 2.0 except for the third-party components listed below. + + +Terms of the Apache License Version 2.0: +--------------------------------------------- +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION +1. Definitions. + +“License” shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +“Licensor” shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +“Legal Entity” shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, “control” means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +“You” (or “Your”) shall mean an individual or Legal Entity exercising permissions granted by this License. + +“Source” form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +“Object” form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +“Work” shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 
+ +“Derivative Works” shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +“Contribution” shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, “submitted” means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as “Not a Contribution.” + +“Contributor” shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a “NOTICE” text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + +You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + + + +Other dependencies and licenses: + + +Open Source Software licensed under the Apache 2.0 license and Other Licenses of the Third-Party Components therein: +--------------------------------------------- +1. basicsr +Copyright 2018-2020 BasicSR Authors + + +This BasicSR project is released under the Apache 2.0 license. + +A copy of Apache 2.0 is included in this file. + +StyleGAN2 +The codes are modified from the repository stylegan2-pytorch. Many thanks to the author - Kim Seonghyeon 😊 for translating from the official TensorFlow codes to PyTorch ones. Here is the license of stylegan2-pytorch. +The official repository is https://github.com/NVlabs/stylegan2, and here is the NVIDIA license. +DFDNet +The codes are largely modified from the repository DFDNet. Their license is Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License. + +Terms of the Nvidia License: +--------------------------------------------- + +1. Definitions + +"Licensor" means any person or entity that distributes its Work. + +"Software" means the original work of authorship made available under +this License. + +"Work" means the Software and any additions to or derivative works of +the Software that are made available under this License. + +"Nvidia Processors" means any central processing unit (CPU), graphics +processing unit (GPU), field-programmable gate array (FPGA), +application-specific integrated circuit (ASIC) or any combination +thereof designed, made, sold, or provided by Nvidia or its affiliates. + +The terms "reproduce," "reproduction," "derivative works," and +"distribution" have the meaning as provided under U.S. copyright law; +provided, however, that for the purposes of this License, derivative +works shall not include works that remain separable from, or merely +link (or bind by name) to the interfaces of, the Work. + +Works, including the Software, are "made available" under this License +by including in or with the Work either (a) a copyright notice +referencing the applicability of this License to the Work, or (b) a +copy of this License. + +2. License Grants + + 2.1 Copyright Grant. 
Subject to the terms and conditions of this + License, each Licensor grants to you a perpetual, worldwide, + non-exclusive, royalty-free, copyright license to reproduce, + prepare derivative works of, publicly display, publicly perform, + sublicense and distribute its Work and any resulting derivative + works in any form. + +3. Limitations + + 3.1 Redistribution. You may reproduce or distribute the Work only + if (a) you do so under this License, (b) you include a complete + copy of this License with your distribution, and (c) you retain + without modification any copyright, patent, trademark, or + attribution notices that are present in the Work. + + 3.2 Derivative Works. You may specify that additional or different + terms apply to the use, reproduction, and distribution of your + derivative works of the Work ("Your Terms") only if (a) Your Terms + provide that the use limitation in Section 3.3 applies to your + derivative works, and (b) you identify the specific derivative + works that are subject to Your Terms. Notwithstanding Your Terms, + this License (including the redistribution requirements in Section + 3.1) will continue to apply to the Work itself. + + 3.3 Use Limitation. The Work and any derivative works thereof only + may be used or intended for use non-commercially. The Work or + derivative works thereof may be used or intended for use by Nvidia + or its affiliates commercially or non-commercially. As used herein, + "non-commercially" means for research or evaluation purposes only. + + 3.4 Patent Claims. If you bring or threaten to bring a patent claim + against any Licensor (including any claim, cross-claim or + counterclaim in a lawsuit) to enforce any patents that you allege + are infringed by any Work, then your rights under this License from + such Licensor (including the grants in Sections 2.1 and 2.2) will + terminate immediately. + + 3.5 Trademarks. This License does not grant any rights to use any + Licensor's or its affiliates' names, logos, or trademarks, except + as necessary to reproduce the notices described in this License. + + 3.6 Termination. If you violate any term of this License, then your + rights under this License (including the grants in Sections 2.1 and + 2.2) will terminate immediately. + +4. Disclaimer of Warranty. + +THE WORK IS PROVIDED "AS IS" WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WARRANTIES OR CONDITIONS OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE OR +NON-INFRINGEMENT. YOU BEAR THE RISK OF UNDERTAKING ANY ACTIVITIES UNDER +THIS LICENSE. + +5. Limitation of Liability. + +EXCEPT AS PROHIBITED BY APPLICABLE LAW, IN NO EVENT AND UNDER NO LEGAL +THEORY, WHETHER IN TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE +SHALL ANY LICENSOR BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY DIRECT, +INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF +OR RELATED TO THIS LICENSE, THE USE OR INABILITY TO USE THE WORK +(INCLUDING BUT NOT LIMITED TO LOSS OF GOODWILL, BUSINESS INTERRUPTION, +LOST PROFITS OR DATA, COMPUTER FAILURE OR MALFUNCTION, OR ANY OTHER +COMMERCIAL DAMAGES OR LOSSES), EVEN IF THE LICENSOR HAS BEEN ADVISED OF +THE POSSIBILITY OF SUCH DAMAGES. 
+ +MIT License + +Copyright (c) 2019 Kim Seonghyeon + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + + +Open Source Software licensed under the BSD 3-Clause license: +--------------------------------------------- +1. torchvision +Copyright (c) Soumith Chintala 2016, +All rights reserved. + +2. torch +Copyright (c) 2016- Facebook, Inc (Adam Paszke) +Copyright (c) 2014- Facebook, Inc (Soumith Chintala) +Copyright (c) 2011-2014 Idiap Research Institute (Ronan Collobert) +Copyright (c) 2012-2014 Deepmind Technologies (Koray Kavukcuoglu) +Copyright (c) 2011-2012 NEC Laboratories America (Koray Kavukcuoglu) +Copyright (c) 2011-2013 NYU (Clement Farabet) +Copyright (c) 2006-2010 NEC Laboratories America (Ronan Collobert, Leon Bottou, Iain Melvin, Jason Weston) +Copyright (c) 2006 Idiap Research Institute (Samy Bengio) +Copyright (c) 2001-2004 Idiap Research Institute (Ronan Collobert, Samy Bengio, Johnny Mariethoz) + + +Terms of the BSD 3-Clause License: +--------------------------------------------- +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + + +Open Source Software licensed under the BSD 3-Clause License and Other Licenses of the Third-Party Components therein: +--------------------------------------------- +1. numpy +Copyright (c) 2005-2020, NumPy Developers. +All rights reserved. + +A copy of BSD 3-Clause License is included in this file. + +The NumPy repository and source distributions bundle several libraries that are +compatibly licensed. We list these here. + +Name: Numpydoc +Files: doc/sphinxext/numpydoc/* +License: BSD-2-Clause + For details, see doc/sphinxext/LICENSE.txt + +Name: scipy-sphinx-theme +Files: doc/scipy-sphinx-theme/* +License: BSD-3-Clause AND PSF-2.0 AND Apache-2.0 + For details, see doc/scipy-sphinx-theme/LICENSE.txt + +Name: lapack-lite +Files: numpy/linalg/lapack_lite/* +License: BSD-3-Clause + For details, see numpy/linalg/lapack_lite/LICENSE.txt + +Name: tempita +Files: tools/npy_tempita/* +License: MIT + For details, see tools/npy_tempita/license.txt + +Name: dragon4 +Files: numpy/core/src/multiarray/dragon4.c +License: MIT + For license text, see numpy/core/src/multiarray/dragon4.c + + + +Open Source Software licensed under the MIT license: +--------------------------------------------- +1. facexlib +Copyright (c) 2020 Xintao Wang + +2. opencv-python +Copyright (c) Olli-Pekka Heinisuo +Please note that only files in cv2 package are used. + + +Terms of the MIT License: +--------------------------------------------- +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +Open Source Software licensed under the MIT license and Other Licenses of the Third-Party Components therein: +--------------------------------------------- +1. tqdm +Copyright (c) 2013 noamraph + +`tqdm` is a product of collaborative work. +Unless otherwise stated, all authors (see commit logs) retain copyright +for their respective work, and release the work under the MIT licence +(text below). + +Exceptions or notable authors are listed below +in reverse chronological order: + +* files: * + MPLv2.0 2015-2020 (c) Casper da Costa-Luis + [casperdcl](https://github.com/casperdcl). +* files: tqdm/_tqdm.py + MIT 2016 (c) [PR #96] on behalf of Google Inc. +* files: tqdm/_tqdm.py setup.py README.rst MANIFEST.in .gitignore + MIT 2013 (c) Noam Yorav-Raphael, original author. + +[PR #96]: https://github.com/tqdm/tqdm/pull/96 + + +Mozilla Public Licence (MPL) v. 2.0 - Exhibit A +----------------------------------------------- + +This Source Code Form is subject to the terms of the +Mozilla Public License, v. 2.0. 
+If a copy of the MPL was not distributed with this file, +You can obtain one at https://mozilla.org/MPL/2.0/. + + +MIT License (MIT) +----------------- + +Copyright (c) 2013 noamraph + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/ldm_patched/pfn/architecture/face/LICENSE-RestoreFormer b/ldm_patched/pfn/architecture/face/LICENSE-RestoreFormer new file mode 100644 index 0000000000000000000000000000000000000000..5ac273fd509e328f396e6e4444673a3b051a4968 --- /dev/null +++ b/ldm_patched/pfn/architecture/face/LICENSE-RestoreFormer @@ -0,0 +1,351 @@ +Tencent is pleased to support the open source community by making GFPGAN available. + +Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved. + +GFPGAN is licensed under the Apache License Version 2.0 except for the third-party components listed below. + + +Terms of the Apache License Version 2.0: +--------------------------------------------- +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION +1. Definitions. + +“License” shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +“Licensor” shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +“Legal Entity” shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, “control” means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +“You” (or “Your”) shall mean an individual or Legal Entity exercising permissions granted by this License. + +“Source” form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +“Object” form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 
+ +“Work” shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +“Derivative Works” shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +“Contribution” shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, “submitted” means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as “Not a Contribution.” + +“Contributor” shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a “NOTICE” text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + +You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + + + +Other dependencies and licenses: + + +Open Source Software licensed under the Apache 2.0 license and Other Licenses of the Third-Party Components therein: +--------------------------------------------- +1. basicsr +Copyright 2018-2020 BasicSR Authors + + +This BasicSR project is released under the Apache 2.0 license. + +A copy of Apache 2.0 is included in this file. + +StyleGAN2 +The codes are modified from the repository stylegan2-pytorch. Many thanks to the author - Kim Seonghyeon 😊 for translating from the official TensorFlow codes to PyTorch ones. Here is the license of stylegan2-pytorch. +The official repository is https://github.com/NVlabs/stylegan2, and here is the NVIDIA license. +DFDNet +The codes are largely modified from the repository DFDNet. Their license is Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License. + +Terms of the Nvidia License: +--------------------------------------------- + +1. Definitions + +"Licensor" means any person or entity that distributes its Work. + +"Software" means the original work of authorship made available under +this License. + +"Work" means the Software and any additions to or derivative works of +the Software that are made available under this License. + +"Nvidia Processors" means any central processing unit (CPU), graphics +processing unit (GPU), field-programmable gate array (FPGA), +application-specific integrated circuit (ASIC) or any combination +thereof designed, made, sold, or provided by Nvidia or its affiliates. + +The terms "reproduce," "reproduction," "derivative works," and +"distribution" have the meaning as provided under U.S. copyright law; +provided, however, that for the purposes of this License, derivative +works shall not include works that remain separable from, or merely +link (or bind by name) to the interfaces of, the Work. + +Works, including the Software, are "made available" under this License +by including in or with the Work either (a) a copyright notice +referencing the applicability of this License to the Work, or (b) a +copy of this License. + +2. License Grants + + 2.1 Copyright Grant. 
Subject to the terms and conditions of this + License, each Licensor grants to you a perpetual, worldwide, + non-exclusive, royalty-free, copyright license to reproduce, + prepare derivative works of, publicly display, publicly perform, + sublicense and distribute its Work and any resulting derivative + works in any form. + +3. Limitations + + 3.1 Redistribution. You may reproduce or distribute the Work only + if (a) you do so under this License, (b) you include a complete + copy of this License with your distribution, and (c) you retain + without modification any copyright, patent, trademark, or + attribution notices that are present in the Work. + + 3.2 Derivative Works. You may specify that additional or different + terms apply to the use, reproduction, and distribution of your + derivative works of the Work ("Your Terms") only if (a) Your Terms + provide that the use limitation in Section 3.3 applies to your + derivative works, and (b) you identify the specific derivative + works that are subject to Your Terms. Notwithstanding Your Terms, + this License (including the redistribution requirements in Section + 3.1) will continue to apply to the Work itself. + + 3.3 Use Limitation. The Work and any derivative works thereof only + may be used or intended for use non-commercially. The Work or + derivative works thereof may be used or intended for use by Nvidia + or its affiliates commercially or non-commercially. As used herein, + "non-commercially" means for research or evaluation purposes only. + + 3.4 Patent Claims. If you bring or threaten to bring a patent claim + against any Licensor (including any claim, cross-claim or + counterclaim in a lawsuit) to enforce any patents that you allege + are infringed by any Work, then your rights under this License from + such Licensor (including the grants in Sections 2.1 and 2.2) will + terminate immediately. + + 3.5 Trademarks. This License does not grant any rights to use any + Licensor's or its affiliates' names, logos, or trademarks, except + as necessary to reproduce the notices described in this License. + + 3.6 Termination. If you violate any term of this License, then your + rights under this License (including the grants in Sections 2.1 and + 2.2) will terminate immediately. + +4. Disclaimer of Warranty. + +THE WORK IS PROVIDED "AS IS" WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WARRANTIES OR CONDITIONS OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE OR +NON-INFRINGEMENT. YOU BEAR THE RISK OF UNDERTAKING ANY ACTIVITIES UNDER +THIS LICENSE. + +5. Limitation of Liability. + +EXCEPT AS PROHIBITED BY APPLICABLE LAW, IN NO EVENT AND UNDER NO LEGAL +THEORY, WHETHER IN TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE +SHALL ANY LICENSOR BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY DIRECT, +INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF +OR RELATED TO THIS LICENSE, THE USE OR INABILITY TO USE THE WORK +(INCLUDING BUT NOT LIMITED TO LOSS OF GOODWILL, BUSINESS INTERRUPTION, +LOST PROFITS OR DATA, COMPUTER FAILURE OR MALFUNCTION, OR ANY OTHER +COMMERCIAL DAMAGES OR LOSSES), EVEN IF THE LICENSOR HAS BEEN ADVISED OF +THE POSSIBILITY OF SUCH DAMAGES. 
+ +MIT License + +Copyright (c) 2019 Kim Seonghyeon + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + + +Open Source Software licensed under the BSD 3-Clause license: +--------------------------------------------- +1. torchvision +Copyright (c) Soumith Chintala 2016, +All rights reserved. + +2. torch +Copyright (c) 2016- Facebook, Inc (Adam Paszke) +Copyright (c) 2014- Facebook, Inc (Soumith Chintala) +Copyright (c) 2011-2014 Idiap Research Institute (Ronan Collobert) +Copyright (c) 2012-2014 Deepmind Technologies (Koray Kavukcuoglu) +Copyright (c) 2011-2012 NEC Laboratories America (Koray Kavukcuoglu) +Copyright (c) 2011-2013 NYU (Clement Farabet) +Copyright (c) 2006-2010 NEC Laboratories America (Ronan Collobert, Leon Bottou, Iain Melvin, Jason Weston) +Copyright (c) 2006 Idiap Research Institute (Samy Bengio) +Copyright (c) 2001-2004 Idiap Research Institute (Ronan Collobert, Samy Bengio, Johnny Mariethoz) + + +Terms of the BSD 3-Clause License: +--------------------------------------------- +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + + +Open Source Software licensed under the BSD 3-Clause License and Other Licenses of the Third-Party Components therein: +--------------------------------------------- +1. numpy +Copyright (c) 2005-2020, NumPy Developers. +All rights reserved. + +A copy of BSD 3-Clause License is included in this file. + +The NumPy repository and source distributions bundle several libraries that are +compatibly licensed. We list these here. + +Name: Numpydoc +Files: doc/sphinxext/numpydoc/* +License: BSD-2-Clause + For details, see doc/sphinxext/LICENSE.txt + +Name: scipy-sphinx-theme +Files: doc/scipy-sphinx-theme/* +License: BSD-3-Clause AND PSF-2.0 AND Apache-2.0 + For details, see doc/scipy-sphinx-theme/LICENSE.txt + +Name: lapack-lite +Files: numpy/linalg/lapack_lite/* +License: BSD-3-Clause + For details, see numpy/linalg/lapack_lite/LICENSE.txt + +Name: tempita +Files: tools/npy_tempita/* +License: MIT + For details, see tools/npy_tempita/license.txt + +Name: dragon4 +Files: numpy/core/src/multiarray/dragon4.c +License: MIT + For license text, see numpy/core/src/multiarray/dragon4.c + + + +Open Source Software licensed under the MIT license: +--------------------------------------------- +1. facexlib +Copyright (c) 2020 Xintao Wang + +2. opencv-python +Copyright (c) Olli-Pekka Heinisuo +Please note that only files in cv2 package are used. + + +Terms of the MIT License: +--------------------------------------------- +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +Open Source Software licensed under the MIT license and Other Licenses of the Third-Party Components therein: +--------------------------------------------- +1. tqdm +Copyright (c) 2013 noamraph + +`tqdm` is a product of collaborative work. +Unless otherwise stated, all authors (see commit logs) retain copyright +for their respective work, and release the work under the MIT licence +(text below). + +Exceptions or notable authors are listed below +in reverse chronological order: + +* files: * + MPLv2.0 2015-2020 (c) Casper da Costa-Luis + [casperdcl](https://github.com/casperdcl). +* files: tqdm/_tqdm.py + MIT 2016 (c) [PR #96] on behalf of Google Inc. +* files: tqdm/_tqdm.py setup.py README.rst MANIFEST.in .gitignore + MIT 2013 (c) Noam Yorav-Raphael, original author. + +[PR #96]: https://github.com/tqdm/tqdm/pull/96 + + +Mozilla Public Licence (MPL) v. 2.0 - Exhibit A +----------------------------------------------- + +This Source Code Form is subject to the terms of the +Mozilla Public License, v. 2.0. 
+If a copy of the MPL was not distributed with this file, +You can obtain one at https://mozilla.org/MPL/2.0/. + + +MIT License (MIT) +----------------- + +Copyright (c) 2013 noamraph + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/ldm_patched/pfn/architecture/face/LICENSE-codeformer b/ldm_patched/pfn/architecture/face/LICENSE-codeformer new file mode 100644 index 0000000000000000000000000000000000000000..be6c4ed8048a7cb436376bbea84cb0bd726ab721 --- /dev/null +++ b/ldm_patched/pfn/architecture/face/LICENSE-codeformer @@ -0,0 +1,35 @@ +S-Lab License 1.0 + +Copyright 2022 S-Lab + +Redistribution and use for non-commercial purpose in source and +binary forms, with or without modification, are permitted provided +that the following conditions are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +In the event that redistribution and/or use for commercial purpose in +source or binary forms, with or without modification is required, +please contact the contributor(s) of the work. 
diff --git a/ldm_patched/pfn/architecture/face/__pycache__/codeformer.cpython-310.pyc b/ldm_patched/pfn/architecture/face/__pycache__/codeformer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bd76c822edff51e5ff4b03e3a9636be5a093cf80 Binary files /dev/null and b/ldm_patched/pfn/architecture/face/__pycache__/codeformer.cpython-310.pyc differ diff --git a/ldm_patched/pfn/architecture/face/__pycache__/gfpganv1_clean_arch.cpython-310.pyc b/ldm_patched/pfn/architecture/face/__pycache__/gfpganv1_clean_arch.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4976a68c9d5a5ccc6241c95f2f89ef79e93f2e90 Binary files /dev/null and b/ldm_patched/pfn/architecture/face/__pycache__/gfpganv1_clean_arch.cpython-310.pyc differ diff --git a/ldm_patched/pfn/architecture/face/__pycache__/restoreformer_arch.cpython-310.pyc b/ldm_patched/pfn/architecture/face/__pycache__/restoreformer_arch.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..43ec0bec6e524e6913bbcc53ab9fdeb7921d8ca8 Binary files /dev/null and b/ldm_patched/pfn/architecture/face/__pycache__/restoreformer_arch.cpython-310.pyc differ diff --git a/ldm_patched/pfn/architecture/face/__pycache__/stylegan2_clean_arch.cpython-310.pyc b/ldm_patched/pfn/architecture/face/__pycache__/stylegan2_clean_arch.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..373a35fe3a486d2cad94b445c6004a3d806f6b2f Binary files /dev/null and b/ldm_patched/pfn/architecture/face/__pycache__/stylegan2_clean_arch.cpython-310.pyc differ diff --git a/ldm_patched/pfn/architecture/face/arcface_arch.py b/ldm_patched/pfn/architecture/face/arcface_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..b548af059a71b38c6c18cd35cbfed7bae7e55441 --- /dev/null +++ b/ldm_patched/pfn/architecture/face/arcface_arch.py @@ -0,0 +1,265 @@ +import torch.nn as nn + + +def conv3x3(inplanes, outplanes, stride=1): + """A simple wrapper for 3x3 convolution with padding. + + Args: + inplanes (int): Channel number of inputs. + outplanes (int): Channel number of outputs. + stride (int): Stride in convolution. Default: 1. + """ + return nn.Conv2d( + inplanes, outplanes, kernel_size=3, stride=stride, padding=1, bias=False + ) + + +class BasicBlock(nn.Module): + """Basic residual block used in the ResNetArcFace architecture. + + Args: + inplanes (int): Channel number of inputs. + planes (int): Channel number of outputs. + stride (int): Stride in convolution. Default: 1. + downsample (nn.Module): The downsample module. Default: None. + """ + + expansion = 1 # output channel expansion ratio + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(BasicBlock, self).__init__() + self.conv1 = conv3x3(inplanes, planes, stride) + self.bn1 = nn.BatchNorm2d(planes) + self.relu = nn.ReLU(inplace=True) + self.conv2 = conv3x3(planes, planes) + self.bn2 = nn.BatchNorm2d(planes) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class IRBlock(nn.Module): + """Improved residual block (IR Block) used in the ResNetArcFace architecture. + + Args: + inplanes (int): Channel number of inputs. + planes (int): Channel number of outputs. + stride (int): Stride in convolution. 
Default: 1.
+        downsample (nn.Module): The downsample module. Default: None.
+        use_se (bool): Whether to use the SEBlock (squeeze and excitation block). Default: True.
+    """
+
+    expansion = 1  # output channel expansion ratio
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None, use_se=True):
+        super(IRBlock, self).__init__()
+        self.bn0 = nn.BatchNorm2d(inplanes)
+        self.conv1 = conv3x3(inplanes, inplanes)
+        self.bn1 = nn.BatchNorm2d(inplanes)
+        self.prelu = nn.PReLU()
+        self.conv2 = conv3x3(inplanes, planes, stride)
+        self.bn2 = nn.BatchNorm2d(planes)
+        self.downsample = downsample
+        self.stride = stride
+        self.use_se = use_se
+        if self.use_se:
+            self.se = SEBlock(planes)
+
+    def forward(self, x):
+        residual = x
+        out = self.bn0(x)
+        out = self.conv1(out)
+        out = self.bn1(out)
+        out = self.prelu(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+        if self.use_se:
+            out = self.se(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out += residual
+        out = self.prelu(out)
+
+        return out
+
+
+class Bottleneck(nn.Module):
+    """Bottleneck block used in the ResNetArcFace architecture.
+
+    Args:
+        inplanes (int): Channel number of inputs.
+        planes (int): Channel number of outputs.
+        stride (int): Stride in convolution. Default: 1.
+        downsample (nn.Module): The downsample module. Default: None.
+    """
+
+    expansion = 4  # output channel expansion ratio
+
+    def __init__(self, inplanes, planes, stride=1, downsample=None):
+        super(Bottleneck, self).__init__()
+        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
+        self.bn1 = nn.BatchNorm2d(planes)
+        self.conv2 = nn.Conv2d(
+            planes, planes, kernel_size=3, stride=stride, padding=1, bias=False
+        )
+        self.bn2 = nn.BatchNorm2d(planes)
+        self.conv3 = nn.Conv2d(
+            planes, planes * self.expansion, kernel_size=1, bias=False
+        )
+        self.bn3 = nn.BatchNorm2d(planes * self.expansion)
+        self.relu = nn.ReLU(inplace=True)
+        self.downsample = downsample
+        self.stride = stride
+
+    def forward(self, x):
+        residual = x
+
+        out = self.conv1(x)
+        out = self.bn1(out)
+        out = self.relu(out)
+
+        out = self.conv2(out)
+        out = self.bn2(out)
+        out = self.relu(out)
+
+        out = self.conv3(out)
+        out = self.bn3(out)
+
+        if self.downsample is not None:
+            residual = self.downsample(x)
+
+        out += residual
+        out = self.relu(out)
+
+        return out
+
+
+class SEBlock(nn.Module):
+    """The squeeze-and-excitation block (SEBlock) used in the IRBlock.
+
+    Args:
+        channel (int): Channel number of inputs.
+        reduction (int): Channel reduction ratio. Default: 16.
+    """
+
+    def __init__(self, channel, reduction=16):
+        super(SEBlock, self).__init__()
+        self.avg_pool = nn.AdaptiveAvgPool2d(
+            1
+        )  # pool to 1x1 without spatial information
+        self.fc = nn.Sequential(
+            nn.Linear(channel, channel // reduction),
+            nn.PReLU(),
+            nn.Linear(channel // reduction, channel),
+            nn.Sigmoid(),
+        )
+
+    def forward(self, x):
+        b, c, _, _ = x.size()
+        y = self.avg_pool(x).view(b, c)
+        y = self.fc(y).view(b, c, 1, 1)
+        return x * y
+
+
+class ResNetArcFace(nn.Module):
+    """ArcFace with ResNet architectures.
+
+    Ref: ArcFace: Additive Angular Margin Loss for Deep Face Recognition.
+
+    Args:
+        block (str): Block used in the ArcFace architecture.
+        layers (tuple(int)): Block numbers in each layer.
+        use_se (bool): Whether to use the SEBlock (squeeze and excitation block). Default: True.
+ """ + + def __init__(self, block, layers, use_se=True): + if block == "IRBlock": + block = IRBlock + self.inplanes = 64 + self.use_se = use_se + super(ResNetArcFace, self).__init__() + + self.conv1 = nn.Conv2d(1, 64, kernel_size=3, padding=1, bias=False) + self.bn1 = nn.BatchNorm2d(64) + self.prelu = nn.PReLU() + self.maxpool = nn.MaxPool2d(kernel_size=2, stride=2) + self.layer1 = self._make_layer(block, 64, layers[0]) + self.layer2 = self._make_layer(block, 128, layers[1], stride=2) + self.layer3 = self._make_layer(block, 256, layers[2], stride=2) + self.layer4 = self._make_layer(block, 512, layers[3], stride=2) + self.bn4 = nn.BatchNorm2d(512) + self.dropout = nn.Dropout() + self.fc5 = nn.Linear(512 * 8 * 8, 512) + self.bn5 = nn.BatchNorm1d(512) + + # initialization + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.xavier_normal_(m.weight) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.Linear): + nn.init.xavier_normal_(m.weight) + nn.init.constant_(m.bias, 0) + + def _make_layer(self, block, planes, num_blocks, stride=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2d( + self.inplanes, + planes * block.expansion, + kernel_size=1, + stride=stride, + bias=False, + ), + nn.BatchNorm2d(planes * block.expansion), + ) + layers = [] + layers.append( + block(self.inplanes, planes, stride, downsample, use_se=self.use_se) + ) + self.inplanes = planes + for _ in range(1, num_blocks): + layers.append(block(self.inplanes, planes, use_se=self.use_se)) + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = self.prelu(x) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + x = self.bn4(x) + x = self.dropout(x) + x = x.view(x.size(0), -1) + x = self.fc5(x) + x = self.bn5(x) + + return x diff --git a/ldm_patched/pfn/architecture/face/codeformer.py b/ldm_patched/pfn/architecture/face/codeformer.py new file mode 100644 index 0000000000000000000000000000000000000000..a0e2e985e8f4487547779574c6b210da412a8e71 --- /dev/null +++ b/ldm_patched/pfn/architecture/face/codeformer.py @@ -0,0 +1,790 @@ +""" +Modified from https://github.com/sczhou/CodeFormer +VQGAN code, adapted from the original created by the Unleashing Transformers authors: +https://github.com/samb-t/unleashing-transformers/blob/master/models/vqgan.py +This version of the arch specifically was gathered from an old version of GFPGAN. If this is a problem, please contact me. 
+""" +import math +from typing import Optional + +import torch +import torch.nn as nn +import torch.nn.functional as F +import logging as logger +from torch import Tensor + + +class VectorQuantizer(nn.Module): + def __init__(self, codebook_size, emb_dim, beta): + super(VectorQuantizer, self).__init__() + self.codebook_size = codebook_size # number of embeddings + self.emb_dim = emb_dim # dimension of embedding + self.beta = beta # commitment cost used in loss term, beta * ||z_e(x)-sg[e]||^2 + self.embedding = nn.Embedding(self.codebook_size, self.emb_dim) + self.embedding.weight.data.uniform_( + -1.0 / self.codebook_size, 1.0 / self.codebook_size + ) + + def forward(self, z): + # reshape z -> (batch, height, width, channel) and flatten + z = z.permute(0, 2, 3, 1).contiguous() + z_flattened = z.view(-1, self.emb_dim) + + # distances from z to embeddings e_j (z - e)^2 = z^2 + e^2 - 2 e * z + d = ( + (z_flattened**2).sum(dim=1, keepdim=True) + + (self.embedding.weight**2).sum(1) + - 2 * torch.matmul(z_flattened, self.embedding.weight.t()) + ) + + mean_distance = torch.mean(d) + # find closest encodings + # min_encoding_indices = torch.argmin(d, dim=1).unsqueeze(1) + min_encoding_scores, min_encoding_indices = torch.topk( + d, 1, dim=1, largest=False + ) + # [0-1], higher score, higher confidence + min_encoding_scores = torch.exp(-min_encoding_scores / 10) + + min_encodings = torch.zeros( + min_encoding_indices.shape[0], self.codebook_size + ).to(z) + min_encodings.scatter_(1, min_encoding_indices, 1) + + # get quantized latent vectors + z_q = torch.matmul(min_encodings, self.embedding.weight).view(z.shape) + # compute loss for embedding + loss = torch.mean((z_q.detach() - z) ** 2) + self.beta * torch.mean( + (z_q - z.detach()) ** 2 + ) + # preserve gradients + z_q = z + (z_q - z).detach() + + # perplexity + e_mean = torch.mean(min_encodings, dim=0) + perplexity = torch.exp(-torch.sum(e_mean * torch.log(e_mean + 1e-10))) + # reshape back to match original input shape + z_q = z_q.permute(0, 3, 1, 2).contiguous() + + return ( + z_q, + loss, + { + "perplexity": perplexity, + "min_encodings": min_encodings, + "min_encoding_indices": min_encoding_indices, + "min_encoding_scores": min_encoding_scores, + "mean_distance": mean_distance, + }, + ) + + def get_codebook_feat(self, indices, shape): + # input indices: batch*token_num -> (batch*token_num)*1 + # shape: batch, height, width, channel + indices = indices.view(-1, 1) + min_encodings = torch.zeros(indices.shape[0], self.codebook_size).to(indices) + min_encodings.scatter_(1, indices, 1) + # get quantized latent vectors + z_q = torch.matmul(min_encodings.float(), self.embedding.weight) + + if shape is not None: # reshape back to match original input shape + z_q = z_q.view(shape).permute(0, 3, 1, 2).contiguous() + + return z_q + + +class GumbelQuantizer(nn.Module): + def __init__( + self, + codebook_size, + emb_dim, + num_hiddens, + straight_through=False, + kl_weight=5e-4, + temp_init=1.0, + ): + super().__init__() + self.codebook_size = codebook_size # number of embeddings + self.emb_dim = emb_dim # dimension of embedding + self.straight_through = straight_through + self.temperature = temp_init + self.kl_weight = kl_weight + self.proj = nn.Conv2d( + num_hiddens, codebook_size, 1 + ) # projects last encoder layer to quantized logits + self.embed = nn.Embedding(codebook_size, emb_dim) + + def forward(self, z): + hard = self.straight_through if self.training else True + + logits = self.proj(z) + + soft_one_hot = F.gumbel_softmax(logits, 
tau=self.temperature, dim=1, hard=hard)
+
+        z_q = torch.einsum("b n h w, n d -> b d h w", soft_one_hot, self.embed.weight)
+
+        # + kl divergence to the prior loss
+        qy = F.softmax(logits, dim=1)
+        diff = (
+            self.kl_weight
+            * torch.sum(qy * torch.log(qy * self.codebook_size + 1e-10), dim=1).mean()
+        )
+        min_encoding_indices = soft_one_hot.argmax(dim=1)
+
+        return z_q, diff, {"min_encoding_indices": min_encoding_indices}
+
+
+class Downsample(nn.Module):
+    def __init__(self, in_channels):
+        super().__init__()
+        self.conv = torch.nn.Conv2d(
+            in_channels, in_channels, kernel_size=3, stride=2, padding=0
+        )
+
+    def forward(self, x):
+        pad = (0, 1, 0, 1)
+        x = torch.nn.functional.pad(x, pad, mode="constant", value=0)
+        x = self.conv(x)
+        return x
+
+
+class Upsample(nn.Module):
+    def __init__(self, in_channels):
+        super().__init__()
+        self.conv = nn.Conv2d(
+            in_channels, in_channels, kernel_size=3, stride=1, padding=1
+        )
+
+    def forward(self, x):
+        x = F.interpolate(x, scale_factor=2.0, mode="nearest")
+        x = self.conv(x)
+
+        return x
+
+
+class AttnBlock(nn.Module):
+    def __init__(self, in_channels):
+        super().__init__()
+        self.in_channels = in_channels
+
+        self.norm = normalize(in_channels)
+        self.q = torch.nn.Conv2d(
+            in_channels, in_channels, kernel_size=1, stride=1, padding=0
+        )
+        self.k = torch.nn.Conv2d(
+            in_channels, in_channels, kernel_size=1, stride=1, padding=0
+        )
+        self.v = torch.nn.Conv2d(
+            in_channels, in_channels, kernel_size=1, stride=1, padding=0
+        )
+        self.proj_out = torch.nn.Conv2d(
+            in_channels, in_channels, kernel_size=1, stride=1, padding=0
+        )
+
+    def forward(self, x):
+        h_ = x
+        h_ = self.norm(h_)
+        q = self.q(h_)
+        k = self.k(h_)
+        v = self.v(h_)
+
+        # compute attention
+        b, c, h, w = q.shape
+        q = q.reshape(b, c, h * w)
+        q = q.permute(0, 2, 1)
+        k = k.reshape(b, c, h * w)
+        w_ = torch.bmm(q, k)
+        w_ = w_ * (int(c) ** (-0.5))
+        w_ = F.softmax(w_, dim=2)
+
+        # attend to values
+        v = v.reshape(b, c, h * w)
+        w_ = w_.permute(0, 2, 1)
+        h_ = torch.bmm(v, w_)
+        h_ = h_.reshape(b, c, h, w)
+
+        h_ = self.proj_out(h_)
+
+        return x + h_
+
+
+class Encoder(nn.Module):
+    def __init__(
+        self,
+        in_channels,
+        nf,
+        out_channels,
+        ch_mult,
+        num_res_blocks,
+        resolution,
+        attn_resolutions,
+    ):
+        super().__init__()
+        self.nf = nf
+        self.num_resolutions = len(ch_mult)
+        self.num_res_blocks = num_res_blocks
+        self.resolution = resolution
+        self.attn_resolutions = attn_resolutions
+
+        curr_res = self.resolution
+        in_ch_mult = (1,) + tuple(ch_mult)
+
+        blocks = []
+        # initial convolution
+        blocks.append(nn.Conv2d(in_channels, nf, kernel_size=3, stride=1, padding=1))
+
+        # residual and downsampling blocks, with attention on smaller res (16x16)
+        for i in range(self.num_resolutions):
+            block_in_ch = nf * in_ch_mult[i]
+            block_out_ch = nf * ch_mult[i]
+            for _ in range(self.num_res_blocks):
+                blocks.append(ResBlock(block_in_ch, block_out_ch))
+                block_in_ch = block_out_ch
+                if curr_res in attn_resolutions:
+                    blocks.append(AttnBlock(block_in_ch))
+
+            if i != self.num_resolutions - 1:
+                blocks.append(Downsample(block_in_ch))
+                curr_res = curr_res // 2
+
+        # non-local attention block
+        blocks.append(ResBlock(block_in_ch, block_in_ch))  # type: ignore
+        blocks.append(AttnBlock(block_in_ch))  # type: ignore
+        blocks.append(ResBlock(block_in_ch, block_in_ch))  # type: ignore
+
+        # normalise and convert to latent size
+        blocks.append(normalize(block_in_ch))  # type: ignore
+        blocks.append(
+            nn.Conv2d(block_in_ch, out_channels, kernel_size=3, stride=1, padding=1)  # type: ignore
+        )
+
self.blocks = nn.ModuleList(blocks) + + def forward(self, x): + for block in self.blocks: + x = block(x) + + return x + + +class Generator(nn.Module): + def __init__(self, nf, ch_mult, res_blocks, img_size, attn_resolutions, emb_dim): + super().__init__() + self.nf = nf + self.ch_mult = ch_mult + self.num_resolutions = len(self.ch_mult) + self.num_res_blocks = res_blocks + self.resolution = img_size + self.attn_resolutions = attn_resolutions + self.in_channels = emb_dim + self.out_channels = 3 + block_in_ch = self.nf * self.ch_mult[-1] + curr_res = self.resolution // 2 ** (self.num_resolutions - 1) + + blocks = [] + # initial conv + blocks.append( + nn.Conv2d(self.in_channels, block_in_ch, kernel_size=3, stride=1, padding=1) + ) + + # non-local attention block + blocks.append(ResBlock(block_in_ch, block_in_ch)) + blocks.append(AttnBlock(block_in_ch)) + blocks.append(ResBlock(block_in_ch, block_in_ch)) + + for i in reversed(range(self.num_resolutions)): + block_out_ch = self.nf * self.ch_mult[i] + + for _ in range(self.num_res_blocks): + blocks.append(ResBlock(block_in_ch, block_out_ch)) + block_in_ch = block_out_ch + + if curr_res in self.attn_resolutions: + blocks.append(AttnBlock(block_in_ch)) + + if i != 0: + blocks.append(Upsample(block_in_ch)) + curr_res = curr_res * 2 + + blocks.append(normalize(block_in_ch)) + blocks.append( + nn.Conv2d( + block_in_ch, self.out_channels, kernel_size=3, stride=1, padding=1 + ) + ) + + self.blocks = nn.ModuleList(blocks) + + def forward(self, x): + for block in self.blocks: + x = block(x) + + return x + + +class VQAutoEncoder(nn.Module): + def __init__( + self, + img_size, + nf, + ch_mult, + quantizer="nearest", + res_blocks=2, + attn_resolutions=[16], + codebook_size=1024, + emb_dim=256, + beta=0.25, + gumbel_straight_through=False, + gumbel_kl_weight=1e-8, + model_path=None, + ): + super().__init__() + self.in_channels = 3 + self.nf = nf + self.n_blocks = res_blocks + self.codebook_size = codebook_size + self.embed_dim = emb_dim + self.ch_mult = ch_mult + self.resolution = img_size + self.attn_resolutions = attn_resolutions + self.quantizer_type = quantizer + self.encoder = Encoder( + self.in_channels, + self.nf, + self.embed_dim, + self.ch_mult, + self.n_blocks, + self.resolution, + self.attn_resolutions, + ) + if self.quantizer_type == "nearest": + self.beta = beta # 0.25 + self.quantize = VectorQuantizer( + self.codebook_size, self.embed_dim, self.beta + ) + elif self.quantizer_type == "gumbel": + self.gumbel_num_hiddens = emb_dim + self.straight_through = gumbel_straight_through + self.kl_weight = gumbel_kl_weight + self.quantize = GumbelQuantizer( + self.codebook_size, + self.embed_dim, + self.gumbel_num_hiddens, + self.straight_through, + self.kl_weight, + ) + self.generator = Generator( + nf, ch_mult, res_blocks, img_size, attn_resolutions, emb_dim + ) + + if model_path is not None: + chkpt = torch.load(model_path, map_location="cpu") + if "params_ema" in chkpt: + self.load_state_dict( + torch.load(model_path, map_location="cpu")["params_ema"] + ) + logger.info(f"vqgan is loaded from: {model_path} [params_ema]") + elif "params" in chkpt: + self.load_state_dict( + torch.load(model_path, map_location="cpu")["params"] + ) + logger.info(f"vqgan is loaded from: {model_path} [params]") + else: + raise ValueError("Wrong params!") + + def forward(self, x): + x = self.encoder(x) + quant, codebook_loss, quant_stats = self.quantize(x) + x = self.generator(quant) + return x, codebook_loss, quant_stats + + +def calc_mean_std(feat, eps=1e-5): + 
"""Calculate mean and std for adaptive_instance_normalization. + Args: + feat (Tensor): 4D tensor. + eps (float): A small value added to the variance to avoid + divide-by-zero. Default: 1e-5. + """ + size = feat.size() + assert len(size) == 4, "The input feature should be 4D tensor." + b, c = size[:2] + feat_var = feat.view(b, c, -1).var(dim=2) + eps + feat_std = feat_var.sqrt().view(b, c, 1, 1) + feat_mean = feat.view(b, c, -1).mean(dim=2).view(b, c, 1, 1) + return feat_mean, feat_std + + +def adaptive_instance_normalization(content_feat, style_feat): + """Adaptive instance normalization. + Adjust the reference features to have the similar color and illuminations + as those in the degradate features. + Args: + content_feat (Tensor): The reference feature. + style_feat (Tensor): The degradate features. + """ + size = content_feat.size() + style_mean, style_std = calc_mean_std(style_feat) + content_mean, content_std = calc_mean_std(content_feat) + normalized_feat = (content_feat - content_mean.expand(size)) / content_std.expand( + size + ) + return normalized_feat * style_std.expand(size) + style_mean.expand(size) + + +class PositionEmbeddingSine(nn.Module): + """ + This is a more standard version of the position embedding, very similar to the one + used by the Attention is all you need paper, generalized to work on images. + """ + + def __init__( + self, num_pos_feats=64, temperature=10000, normalize=False, scale=None + ): + super().__init__() + self.num_pos_feats = num_pos_feats + self.temperature = temperature + self.normalize = normalize + if scale is not None and normalize is False: + raise ValueError("normalize should be True if scale is passed") + if scale is None: + scale = 2 * math.pi + self.scale = scale + + def forward(self, x, mask=None): + if mask is None: + mask = torch.zeros( + (x.size(0), x.size(2), x.size(3)), device=x.device, dtype=torch.bool + ) + not_mask = ~mask # pylint: disable=invalid-unary-operand-type + y_embed = not_mask.cumsum(1, dtype=torch.float32) + x_embed = not_mask.cumsum(2, dtype=torch.float32) + if self.normalize: + eps = 1e-6 + y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale + x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale + + dim_t = torch.arange(self.num_pos_feats, dtype=torch.float32, device=x.device) + dim_t = self.temperature ** (2 * (dim_t // 2) / self.num_pos_feats) + + pos_x = x_embed[:, :, :, None] / dim_t + pos_y = y_embed[:, :, :, None] / dim_t + pos_x = torch.stack( + (pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()), dim=4 + ).flatten(3) + pos_y = torch.stack( + (pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()), dim=4 + ).flatten(3) + pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2) + return pos + + +def _get_activation_fn(activation): + """Return an activation function given a string""" + if activation == "relu": + return F.relu + if activation == "gelu": + return F.gelu + if activation == "glu": + return F.glu + raise RuntimeError(f"activation should be relu/gelu, not {activation}.") + + +class TransformerSALayer(nn.Module): + def __init__( + self, embed_dim, nhead=8, dim_mlp=2048, dropout=0.0, activation="gelu" + ): + super().__init__() + self.self_attn = nn.MultiheadAttention(embed_dim, nhead, dropout=dropout) + # Implementation of Feedforward model - MLP + self.linear1 = nn.Linear(embed_dim, dim_mlp) + self.dropout = nn.Dropout(dropout) + self.linear2 = nn.Linear(dim_mlp, embed_dim) + + self.norm1 = nn.LayerNorm(embed_dim) + self.norm2 = nn.LayerNorm(embed_dim) + self.dropout1 = 
nn.Dropout(dropout) + self.dropout2 = nn.Dropout(dropout) + + self.activation = _get_activation_fn(activation) + + def with_pos_embed(self, tensor, pos: Optional[Tensor]): + return tensor if pos is None else tensor + pos + + def forward( + self, + tgt, + tgt_mask: Optional[Tensor] = None, + tgt_key_padding_mask: Optional[Tensor] = None, + query_pos: Optional[Tensor] = None, + ): + # self attention + tgt2 = self.norm1(tgt) + q = k = self.with_pos_embed(tgt2, query_pos) + tgt2 = self.self_attn( + q, k, value=tgt2, attn_mask=tgt_mask, key_padding_mask=tgt_key_padding_mask + )[0] + tgt = tgt + self.dropout1(tgt2) + + # ffn + tgt2 = self.norm2(tgt) + tgt2 = self.linear2(self.dropout(self.activation(self.linear1(tgt2)))) + tgt = tgt + self.dropout2(tgt2) + return tgt + + +def normalize(in_channels): + return torch.nn.GroupNorm( + num_groups=32, num_channels=in_channels, eps=1e-6, affine=True + ) + + +@torch.jit.script # type: ignore +def swish(x): + return x * torch.sigmoid(x) + + +class ResBlock(nn.Module): + def __init__(self, in_channels, out_channels=None): + super(ResBlock, self).__init__() + self.in_channels = in_channels + self.out_channels = in_channels if out_channels is None else out_channels + self.norm1 = normalize(in_channels) + self.conv1 = nn.Conv2d( + in_channels, out_channels, kernel_size=3, stride=1, padding=1 # type: ignore + ) + self.norm2 = normalize(out_channels) + self.conv2 = nn.Conv2d( + out_channels, out_channels, kernel_size=3, stride=1, padding=1 # type: ignore + ) + if self.in_channels != self.out_channels: + self.conv_out = nn.Conv2d( + in_channels, out_channels, kernel_size=1, stride=1, padding=0 # type: ignore + ) + + def forward(self, x_in): + x = x_in + x = self.norm1(x) + x = swish(x) + x = self.conv1(x) + x = self.norm2(x) + x = swish(x) + x = self.conv2(x) + if self.in_channels != self.out_channels: + x_in = self.conv_out(x_in) + + return x + x_in + + +class Fuse_sft_block(nn.Module): + def __init__(self, in_ch, out_ch): + super().__init__() + self.encode_enc = ResBlock(2 * in_ch, out_ch) + + self.scale = nn.Sequential( + nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1), + nn.LeakyReLU(0.2, True), + nn.Conv2d(out_ch, out_ch, kernel_size=3, padding=1), + ) + + self.shift = nn.Sequential( + nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1), + nn.LeakyReLU(0.2, True), + nn.Conv2d(out_ch, out_ch, kernel_size=3, padding=1), + ) + + def forward(self, enc_feat, dec_feat, w=1): + enc_feat = self.encode_enc(torch.cat([enc_feat, dec_feat], dim=1)) + scale = self.scale(enc_feat) + shift = self.shift(enc_feat) + residual = w * (dec_feat * scale + shift) + out = dec_feat + residual + return out + + +class CodeFormer(VQAutoEncoder): + def __init__(self, state_dict): + dim_embd = 512 + n_head = 8 + n_layers = 9 + codebook_size = 1024 + latent_size = 256 + connect_list = ["32", "64", "128", "256"] + fix_modules = ["quantize", "generator"] + + # This is just a guess as I only have one model to look at + position_emb = state_dict["position_emb"] + dim_embd = position_emb.shape[1] + latent_size = position_emb.shape[0] + + try: + n_layers = len( + set([x.split(".")[1] for x in state_dict.keys() if "ft_layers" in x]) + ) + except: + pass + + codebook_size = state_dict["quantize.embedding.weight"].shape[0] + + # This is also just another guess + n_head_exp = ( + state_dict["ft_layers.0.self_attn.in_proj_weight"].shape[0] // dim_embd + ) + n_head = 2**n_head_exp + + in_nc = state_dict["encoder.blocks.0.weight"].shape[1] + + self.model_arch = "CodeFormer" + self.sub_type = "Face 
SR" + self.scale = 8 + self.in_nc = in_nc + self.out_nc = in_nc + + self.state = state_dict + + self.supports_fp16 = False + self.supports_bf16 = True + self.min_size_restriction = 16 + + super(CodeFormer, self).__init__( + 512, 64, [1, 2, 2, 4, 4, 8], "nearest", 2, [16], codebook_size + ) + + if fix_modules is not None: + for module in fix_modules: + for param in getattr(self, module).parameters(): + param.requires_grad = False + + self.connect_list = connect_list + self.n_layers = n_layers + self.dim_embd = dim_embd + self.dim_mlp = dim_embd * 2 + + self.position_emb = nn.Parameter(torch.zeros(latent_size, self.dim_embd)) # type: ignore + self.feat_emb = nn.Linear(256, self.dim_embd) + + # transformer + self.ft_layers = nn.Sequential( + *[ + TransformerSALayer( + embed_dim=dim_embd, nhead=n_head, dim_mlp=self.dim_mlp, dropout=0.0 + ) + for _ in range(self.n_layers) + ] + ) + + # logits_predict head + self.idx_pred_layer = nn.Sequential( + nn.LayerNorm(dim_embd), nn.Linear(dim_embd, codebook_size, bias=False) + ) + + self.channels = { + "16": 512, + "32": 256, + "64": 256, + "128": 128, + "256": 128, + "512": 64, + } + + # after second residual block for > 16, before attn layer for ==16 + self.fuse_encoder_block = { + "512": 2, + "256": 5, + "128": 8, + "64": 11, + "32": 14, + "16": 18, + } + # after first residual block for > 16, before attn layer for ==16 + self.fuse_generator_block = { + "16": 6, + "32": 9, + "64": 12, + "128": 15, + "256": 18, + "512": 21, + } + + # fuse_convs_dict + self.fuse_convs_dict = nn.ModuleDict() + for f_size in self.connect_list: + in_ch = self.channels[f_size] + self.fuse_convs_dict[f_size] = Fuse_sft_block(in_ch, in_ch) + + self.load_state_dict(state_dict) + + def _init_weights(self, module): + if isinstance(module, (nn.Linear, nn.Embedding)): + module.weight.data.normal_(mean=0.0, std=0.02) + if isinstance(module, nn.Linear) and module.bias is not None: + module.bias.data.zero_() + elif isinstance(module, nn.LayerNorm): + module.bias.data.zero_() + module.weight.data.fill_(1.0) + + def forward(self, x, weight=0.5, **kwargs): + detach_16 = True + code_only = False + adain = True + # ################### Encoder ##################### + enc_feat_dict = {} + out_list = [self.fuse_encoder_block[f_size] for f_size in self.connect_list] + for i, block in enumerate(self.encoder.blocks): + x = block(x) + if i in out_list: + enc_feat_dict[str(x.shape[-1])] = x.clone() + + lq_feat = x + # ################# Transformer ################### + # quant_feat, codebook_loss, quant_stats = self.quantize(lq_feat) + pos_emb = self.position_emb.unsqueeze(1).repeat(1, x.shape[0], 1) + # BCHW -> BC(HW) -> (HW)BC + feat_emb = self.feat_emb(lq_feat.flatten(2).permute(2, 0, 1)) + query_emb = feat_emb + # Transformer encoder + for layer in self.ft_layers: + query_emb = layer(query_emb, query_pos=pos_emb) + + # output logits + logits = self.idx_pred_layer(query_emb) # (hw)bn + logits = logits.permute(1, 0, 2) # (hw)bn -> b(hw)n + + if code_only: # for training stage II + # logits doesn't need softmax before cross_entropy loss + return logits, lq_feat + + # ################# Quantization ################### + # if self.training: + # quant_feat = torch.einsum('btn,nc->btc', [soft_one_hot, self.quantize.embedding.weight]) + # # b(hw)c -> bc(hw) -> bchw + # quant_feat = quant_feat.permute(0,2,1).view(lq_feat.shape) + # ------------ + soft_one_hot = F.softmax(logits, dim=2) + _, top_idx = torch.topk(soft_one_hot, 1, dim=2) + quant_feat = self.quantize.get_codebook_feat( + top_idx, 
shape=[x.shape[0], 16, 16, 256] # type: ignore + ) + # preserve gradients + # quant_feat = lq_feat + (quant_feat - lq_feat).detach() + + if detach_16: + quant_feat = quant_feat.detach() # for training stage III + if adain: + quant_feat = adaptive_instance_normalization(quant_feat, lq_feat) + + # ################## Generator #################### + x = quant_feat + fuse_list = [self.fuse_generator_block[f_size] for f_size in self.connect_list] + + for i, block in enumerate(self.generator.blocks): + x = block(x) + if i in fuse_list: # fuse after i-th block + f_size = str(x.shape[-1]) + if weight > 0: + x = self.fuse_convs_dict[f_size]( + enc_feat_dict[f_size].detach(), x, weight + ) + out = x + # logits doesn't need softmax before cross_entropy loss + # return out, logits, lq_feat + return out, logits diff --git a/ldm_patched/pfn/architecture/face/fused_act.py b/ldm_patched/pfn/architecture/face/fused_act.py new file mode 100644 index 0000000000000000000000000000000000000000..7ed526547b4644ac6341947a801b76d9ed798f26 --- /dev/null +++ b/ldm_patched/pfn/architecture/face/fused_act.py @@ -0,0 +1,81 @@ +# pylint: skip-file +# type: ignore +# modify from https://github.com/rosinality/stylegan2-pytorch/blob/master/op/fused_act.py # noqa:E501 + +import torch +from torch import nn +from torch.autograd import Function + +fused_act_ext = None + + +class FusedLeakyReLUFunctionBackward(Function): + @staticmethod + def forward(ctx, grad_output, out, negative_slope, scale): + ctx.save_for_backward(out) + ctx.negative_slope = negative_slope + ctx.scale = scale + + empty = grad_output.new_empty(0) + + grad_input = fused_act_ext.fused_bias_act( + grad_output, empty, out, 3, 1, negative_slope, scale + ) + + dim = [0] + + if grad_input.ndim > 2: + dim += list(range(2, grad_input.ndim)) + + grad_bias = grad_input.sum(dim).detach() + + return grad_input, grad_bias + + @staticmethod + def backward(ctx, gradgrad_input, gradgrad_bias): + (out,) = ctx.saved_tensors + gradgrad_out = fused_act_ext.fused_bias_act( + gradgrad_input, gradgrad_bias, out, 3, 1, ctx.negative_slope, ctx.scale + ) + + return gradgrad_out, None, None, None + + +class FusedLeakyReLUFunction(Function): + @staticmethod + def forward(ctx, input, bias, negative_slope, scale): + empty = input.new_empty(0) + out = fused_act_ext.fused_bias_act( + input, bias, empty, 3, 0, negative_slope, scale + ) + ctx.save_for_backward(out) + ctx.negative_slope = negative_slope + ctx.scale = scale + + return out + + @staticmethod + def backward(ctx, grad_output): + (out,) = ctx.saved_tensors + + grad_input, grad_bias = FusedLeakyReLUFunctionBackward.apply( + grad_output, out, ctx.negative_slope, ctx.scale + ) + + return grad_input, grad_bias, None, None + + +class FusedLeakyReLU(nn.Module): + def __init__(self, channel, negative_slope=0.2, scale=2**0.5): + super().__init__() + + self.bias = nn.Parameter(torch.zeros(channel)) + self.negative_slope = negative_slope + self.scale = scale + + def forward(self, input): + return fused_leaky_relu(input, self.bias, self.negative_slope, self.scale) + + +def fused_leaky_relu(input, bias, negative_slope=0.2, scale=2**0.5): + return FusedLeakyReLUFunction.apply(input, bias, negative_slope, scale) diff --git a/ldm_patched/pfn/architecture/face/gfpgan_bilinear_arch.py b/ldm_patched/pfn/architecture/face/gfpgan_bilinear_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..b6e820e006f52936c3399d3d37fdf571f2385dcb --- /dev/null +++ b/ldm_patched/pfn/architecture/face/gfpgan_bilinear_arch.py @@ -0,0 +1,389 @@ +# 
pylint: skip-file +# type: ignore +import math +import random + +import torch +from torch import nn + +from .gfpganv1_arch import ResUpBlock +from .stylegan2_bilinear_arch import ( + ConvLayer, + EqualConv2d, + EqualLinear, + ResBlock, + ScaledLeakyReLU, + StyleGAN2GeneratorBilinear, +) + + +class StyleGAN2GeneratorBilinearSFT(StyleGAN2GeneratorBilinear): + """StyleGAN2 Generator with SFT modulation (Spatial Feature Transform). + It is the bilinear version. It does not use the complicated UpFirDnSmooth function that is not friendly for + deployment. It can be easily converted to the clean version: StyleGAN2GeneratorCSFT. + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + num_mlp (int): Layer number of MLP style layers. Default: 8. + channel_multiplier (int): Channel multiplier for large networks of StyleGAN2. Default: 2. + lr_mlp (float): Learning rate multiplier for mlp layers. Default: 0.01. + narrow (float): The narrow ratio for channels. Default: 1. + sft_half (bool): Whether to apply SFT on half of the input channels. Default: False. + """ + + def __init__( + self, + out_size, + num_style_feat=512, + num_mlp=8, + channel_multiplier=2, + lr_mlp=0.01, + narrow=1, + sft_half=False, + ): + super(StyleGAN2GeneratorBilinearSFT, self).__init__( + out_size, + num_style_feat=num_style_feat, + num_mlp=num_mlp, + channel_multiplier=channel_multiplier, + lr_mlp=lr_mlp, + narrow=narrow, + ) + self.sft_half = sft_half + + def forward( + self, + styles, + conditions, + input_is_latent=False, + noise=None, + randomize_noise=True, + truncation=1, + truncation_latent=None, + inject_index=None, + return_latents=False, + ): + """Forward function for StyleGAN2GeneratorBilinearSFT. + Args: + styles (list[Tensor]): Sample codes of styles. + conditions (list[Tensor]): SFT conditions to generators. + input_is_latent (bool): Whether input is latent style. Default: False. + noise (Tensor | None): Input noise or None. Default: None. + randomize_noise (bool): Randomize noise, used when 'noise' is False. Default: True. + truncation (float): The truncation ratio. Default: 1. + truncation_latent (Tensor | None): The truncation latent tensor. Default: None. + inject_index (int | None): The injection index for mixing noise. Default: None. + return_latents (bool): Whether to return style latents. Default: False. 
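+ Example (illustrative sketch, not from the upstream docs; ``g`` and ``conditions`` are hypothetical names, and ``conditions`` would normally be produced by the GFPGAN encoder): + g = StyleGAN2GeneratorBilinearSFT(out_size=512, sft_half=True) + styles = [torch.randn(1, 512)] # one style code of size num_style_feat + image, _ = g(styles, conditions) # conditions: list[Tensor] of SFT scales/shifts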
+ """ + # style codes -> latents with Style MLP layer + if not input_is_latent: + styles = [self.style_mlp(s) for s in styles] + # noises + if noise is None: + if randomize_noise: + noise = [None] * self.num_layers # for each style conv layer + else: # use the stored noise + noise = [ + getattr(self.noises, f"noise{i}") for i in range(self.num_layers) + ] + # style truncation + if truncation < 1: + style_truncation = [] + for style in styles: + style_truncation.append( + truncation_latent + truncation * (style - truncation_latent) + ) + styles = style_truncation + # get style latents with injection + if len(styles) == 1: + inject_index = self.num_latent + + if styles[0].ndim < 3: + # repeat latent code for all the layers + latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + else: # used for encoder with different latent code for each layer + latent = styles[0] + elif len(styles) == 2: # mixing noises + if inject_index is None: + inject_index = random.randint(1, self.num_latent - 1) + latent1 = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + latent2 = ( + styles[1].unsqueeze(1).repeat(1, self.num_latent - inject_index, 1) + ) + latent = torch.cat([latent1, latent2], 1) + + # main generation + out = self.constant_input(latent.shape[0]) + out = self.style_conv1(out, latent[:, 0], noise=noise[0]) + skip = self.to_rgb1(out, latent[:, 1]) + + i = 1 + for conv1, conv2, noise1, noise2, to_rgb in zip( + self.style_convs[::2], + self.style_convs[1::2], + noise[1::2], + noise[2::2], + self.to_rgbs, + ): + out = conv1(out, latent[:, i], noise=noise1) + + # the conditions may have fewer levels + if i < len(conditions): + # SFT part to combine the conditions + if self.sft_half: # only apply SFT to half of the channels + out_same, out_sft = torch.split(out, int(out.size(1) // 2), dim=1) + out_sft = out_sft * conditions[i - 1] + conditions[i] + out = torch.cat([out_same, out_sft], dim=1) + else: # apply SFT to all the channels + out = out * conditions[i - 1] + conditions[i] + + out = conv2(out, latent[:, i + 1], noise=noise2) + skip = to_rgb(out, latent[:, i + 2], skip) # feature back to the rgb space + i += 2 + + image = skip + + if return_latents: + return image, latent + else: + return image, None + + +class GFPGANBilinear(nn.Module): + """The GFPGAN architecture: Unet + StyleGAN2 decoder with SFT. + It is the bilinear version and it does not use the complicated UpFirDnSmooth function that is not friendly for + deployment. It can be easily converted to the clean version: GFPGANv1Clean. + Ref: GFP-GAN: Towards Real-World Blind Face Restoration with Generative Facial Prior. + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + channel_multiplier (int): Channel multiplier for large networks of StyleGAN2. Default: 2. + decoder_load_path (str): The path to the pre-trained decoder model (usually, the StyleGAN2). Default: None. + fix_decoder (bool): Whether to fix the decoder. Default: True. + num_mlp (int): Layer number of MLP style layers. Default: 8. + lr_mlp (float): Learning rate multiplier for mlp layers. Default: 0.01. + input_is_latent (bool): Whether input is latent style. Default: False. + different_w (bool): Whether to use different latent w for different layers. Default: False. + narrow (float): The narrow ratio for channels. Default: 1. + sft_half (bool): Whether to apply SFT on half of the input channels. Default: False. 
+ """ + + def __init__( + self, + out_size, + num_style_feat=512, + channel_multiplier=1, + decoder_load_path=None, + fix_decoder=True, + # for stylegan decoder + num_mlp=8, + lr_mlp=0.01, + input_is_latent=False, + different_w=False, + narrow=1, + sft_half=False, + ): + super(GFPGANBilinear, self).__init__() + self.input_is_latent = input_is_latent + self.different_w = different_w + self.num_style_feat = num_style_feat + self.min_size_restriction = 512 + + unet_narrow = narrow * 0.5 # by default, use a half of input channels + channels = { + "4": int(512 * unet_narrow), + "8": int(512 * unet_narrow), + "16": int(512 * unet_narrow), + "32": int(512 * unet_narrow), + "64": int(256 * channel_multiplier * unet_narrow), + "128": int(128 * channel_multiplier * unet_narrow), + "256": int(64 * channel_multiplier * unet_narrow), + "512": int(32 * channel_multiplier * unet_narrow), + "1024": int(16 * channel_multiplier * unet_narrow), + } + + self.log_size = int(math.log(out_size, 2)) + first_out_size = 2 ** (int(math.log(out_size, 2))) + + self.conv_body_first = ConvLayer( + 3, channels[f"{first_out_size}"], 1, bias=True, activate=True + ) + + # downsample + in_channels = channels[f"{first_out_size}"] + self.conv_body_down = nn.ModuleList() + for i in range(self.log_size, 2, -1): + out_channels = channels[f"{2**(i - 1)}"] + self.conv_body_down.append(ResBlock(in_channels, out_channels)) + in_channels = out_channels + + self.final_conv = ConvLayer( + in_channels, channels["4"], 3, bias=True, activate=True + ) + + # upsample + in_channels = channels["4"] + self.conv_body_up = nn.ModuleList() + for i in range(3, self.log_size + 1): + out_channels = channels[f"{2**i}"] + self.conv_body_up.append(ResUpBlock(in_channels, out_channels)) + in_channels = out_channels + + # to RGB + self.toRGB = nn.ModuleList() + for i in range(3, self.log_size + 1): + self.toRGB.append( + EqualConv2d( + channels[f"{2**i}"], + 3, + 1, + stride=1, + padding=0, + bias=True, + bias_init_val=0, + ) + ) + + if different_w: + linear_out_channel = (int(math.log(out_size, 2)) * 2 - 2) * num_style_feat + else: + linear_out_channel = num_style_feat + + self.final_linear = EqualLinear( + channels["4"] * 4 * 4, + linear_out_channel, + bias=True, + bias_init_val=0, + lr_mul=1, + activation=None, + ) + + # the decoder: stylegan2 generator with SFT modulations + self.stylegan_decoder = StyleGAN2GeneratorBilinearSFT( + out_size=out_size, + num_style_feat=num_style_feat, + num_mlp=num_mlp, + channel_multiplier=channel_multiplier, + lr_mlp=lr_mlp, + narrow=narrow, + sft_half=sft_half, + ) + + # load pre-trained stylegan2 model if necessary + if decoder_load_path: + self.stylegan_decoder.load_state_dict( + torch.load( + decoder_load_path, map_location=lambda storage, loc: storage + )["params_ema"] + ) + # fix decoder without updating params + if fix_decoder: + for _, param in self.stylegan_decoder.named_parameters(): + param.requires_grad = False + + # for SFT modulations (scale and shift) + self.condition_scale = nn.ModuleList() + self.condition_shift = nn.ModuleList() + for i in range(3, self.log_size + 1): + out_channels = channels[f"{2**i}"] + if sft_half: + sft_out_channels = out_channels + else: + sft_out_channels = out_channels * 2 + self.condition_scale.append( + nn.Sequential( + EqualConv2d( + out_channels, + out_channels, + 3, + stride=1, + padding=1, + bias=True, + bias_init_val=0, + ), + ScaledLeakyReLU(0.2), + EqualConv2d( + out_channels, + sft_out_channels, + 3, + stride=1, + padding=1, + bias=True, + bias_init_val=1, + ), + ) + 
) + self.condition_shift.append( + nn.Sequential( + EqualConv2d( + out_channels, + out_channels, + 3, + stride=1, + padding=1, + bias=True, + bias_init_val=0, + ), + ScaledLeakyReLU(0.2), + EqualConv2d( + out_channels, + sft_out_channels, + 3, + stride=1, + padding=1, + bias=True, + bias_init_val=0, + ), + ) + ) + + def forward(self, x, return_latents=False, return_rgb=True, randomize_noise=True): + """Forward function for GFPGANBilinear. + Args: + x (Tensor): Input images. + return_latents (bool): Whether to return style latents. Default: False. + return_rgb (bool): Whether to return intermediate rgb images. Default: True. + randomize_noise (bool): Randomize noise, used when 'noise' is False. Default: True. + """ + conditions = [] + unet_skips = [] + out_rgbs = [] + + # encoder + feat = self.conv_body_first(x) + for i in range(self.log_size - 2): + feat = self.conv_body_down[i](feat) + unet_skips.insert(0, feat) + + feat = self.final_conv(feat) + + # style code + style_code = self.final_linear(feat.view(feat.size(0), -1)) + if self.different_w: + style_code = style_code.view(style_code.size(0), -1, self.num_style_feat) + + # decode + for i in range(self.log_size - 2): + # add unet skip + feat = feat + unet_skips[i] + # ResUpLayer + feat = self.conv_body_up[i](feat) + # generate scale and shift for SFT layers + scale = self.condition_scale[i](feat) + conditions.append(scale.clone()) + shift = self.condition_shift[i](feat) + conditions.append(shift.clone()) + # generate rgb images + if return_rgb: + out_rgbs.append(self.toRGB[i](feat)) + + # decoder + image, _ = self.stylegan_decoder( + [style_code], + conditions, + return_latents=return_latents, + input_is_latent=self.input_is_latent, + randomize_noise=randomize_noise, + ) + + return image, out_rgbs diff --git a/ldm_patched/pfn/architecture/face/gfpganv1_arch.py b/ldm_patched/pfn/architecture/face/gfpganv1_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..72d72fc865ec35b2ccd23f13b3d8ef0be5dbaf7a --- /dev/null +++ b/ldm_patched/pfn/architecture/face/gfpganv1_arch.py @@ -0,0 +1,566 @@ +# pylint: skip-file +# type: ignore +import math +import random + +import torch +from torch import nn +from torch.nn import functional as F + +from .fused_act import FusedLeakyReLU +from .stylegan2_arch import ( + ConvLayer, + EqualConv2d, + EqualLinear, + ResBlock, + ScaledLeakyReLU, + StyleGAN2Generator, +) + + +class StyleGAN2GeneratorSFT(StyleGAN2Generator): + """StyleGAN2 Generator with SFT modulation (Spatial Feature Transform). + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + num_mlp (int): Layer number of MLP style layers. Default: 8. + channel_multiplier (int): Channel multiplier for large networks of StyleGAN2. Default: 2. + resample_kernel (list[int]): A list indicating the 1D resample kernel magnitude. A cross product will be + applied to extend the 1D resample kernel to a 2D resample kernel. Default: (1, 3, 3, 1). + lr_mlp (float): Learning rate multiplier for mlp layers. Default: 0.01. + narrow (float): The narrow ratio for channels. Default: 1. + sft_half (bool): Whether to apply SFT on half of the input channels. Default: False.
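+ Example (sketch; unlike the bilinear variant, this class relies on the compiled upfirdn/fused ops pulled in via ``stylegan2_arch``, and ``conditions`` is a hypothetical list of SFT tensors): + g = StyleGAN2GeneratorSFT(out_size=512, sft_half=True) + image, _ = g([torch.randn(1, 512)], conditions)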
+ """ + + def __init__( + self, + out_size, + num_style_feat=512, + num_mlp=8, + channel_multiplier=2, + resample_kernel=(1, 3, 3, 1), + lr_mlp=0.01, + narrow=1, + sft_half=False, + ): + super(StyleGAN2GeneratorSFT, self).__init__( + out_size, + num_style_feat=num_style_feat, + num_mlp=num_mlp, + channel_multiplier=channel_multiplier, + resample_kernel=resample_kernel, + lr_mlp=lr_mlp, + narrow=narrow, + ) + self.sft_half = sft_half + + def forward( + self, + styles, + conditions, + input_is_latent=False, + noise=None, + randomize_noise=True, + truncation=1, + truncation_latent=None, + inject_index=None, + return_latents=False, + ): + """Forward function for StyleGAN2GeneratorSFT. + Args: + styles (list[Tensor]): Sample codes of styles. + conditions (list[Tensor]): SFT conditions to generators. + input_is_latent (bool): Whether input is latent style. Default: False. + noise (Tensor | None): Input noise or None. Default: None. + randomize_noise (bool): Randomize noise, used when 'noise' is False. Default: True. + truncation (float): The truncation ratio. Default: 1. + truncation_latent (Tensor | None): The truncation latent tensor. Default: None. + inject_index (int | None): The injection index for mixing noise. Default: None. + return_latents (bool): Whether to return style latents. Default: False. + """ + # style codes -> latents with Style MLP layer + if not input_is_latent: + styles = [self.style_mlp(s) for s in styles] + # noises + if noise is None: + if randomize_noise: + noise = [None] * self.num_layers # for each style conv layer + else: # use the stored noise + noise = [ + getattr(self.noises, f"noise{i}") for i in range(self.num_layers) + ] + # style truncation + if truncation < 1: + style_truncation = [] + for style in styles: + style_truncation.append( + truncation_latent + truncation * (style - truncation_latent) + ) + styles = style_truncation + # get style latents with injection + if len(styles) == 1: + inject_index = self.num_latent + + if styles[0].ndim < 3: + # repeat latent code for all the layers + latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + else: # used for encoder with different latent code for each layer + latent = styles[0] + elif len(styles) == 2: # mixing noises + if inject_index is None: + inject_index = random.randint(1, self.num_latent - 1) + latent1 = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + latent2 = ( + styles[1].unsqueeze(1).repeat(1, self.num_latent - inject_index, 1) + ) + latent = torch.cat([latent1, latent2], 1) + + # main generation + out = self.constant_input(latent.shape[0]) + out = self.style_conv1(out, latent[:, 0], noise=noise[0]) + skip = self.to_rgb1(out, latent[:, 1]) + + i = 1 + for conv1, conv2, noise1, noise2, to_rgb in zip( + self.style_convs[::2], + self.style_convs[1::2], + noise[1::2], + noise[2::2], + self.to_rgbs, + ): + out = conv1(out, latent[:, i], noise=noise1) + + # the conditions may have fewer levels + if i < len(conditions): + # SFT part to combine the conditions + if self.sft_half: # only apply SFT to half of the channels + out_same, out_sft = torch.split(out, int(out.size(1) // 2), dim=1) + out_sft = out_sft * conditions[i - 1] + conditions[i] + out = torch.cat([out_same, out_sft], dim=1) + else: # apply SFT to all the channels + out = out * conditions[i - 1] + conditions[i] + + out = conv2(out, latent[:, i + 1], noise=noise2) + skip = to_rgb(out, latent[:, i + 2], skip) # feature back to the rgb space + i += 2 + + image = skip + + if return_latents: + return image, latent + else: + return image, 
None + + +class ConvUpLayer(nn.Module): + """Convolutional upsampling layer. It uses a bilinear upsampler followed by a convolution. + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + kernel_size (int): Size of the convolving kernel. + stride (int): Stride of the convolution. Default: 1 + padding (int): Zero-padding added to both sides of the input. Default: 0. + bias (bool): If ``True``, adds a learnable bias to the output. Default: ``True``. + bias_init_val (float): Bias initialized value. Default: 0. + activate (bool): Whether to use activation. Default: True. + """ + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + padding=0, + bias=True, + bias_init_val=0, + activate=True, + ): + super(ConvUpLayer, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = kernel_size + self.stride = stride + self.padding = padding + # self.scale is used to scale the convolution weights, which is related to the common initializations. + self.scale = 1 / math.sqrt(in_channels * kernel_size**2) + + self.weight = nn.Parameter( + torch.randn(out_channels, in_channels, kernel_size, kernel_size) + ) + + if bias and not activate: + self.bias = nn.Parameter(torch.zeros(out_channels).fill_(bias_init_val)) + else: + self.register_parameter("bias", None) + + # activation + if activate: + if bias: + self.activation = FusedLeakyReLU(out_channels) + else: + self.activation = ScaledLeakyReLU(0.2) + else: + self.activation = None + + def forward(self, x): + # bilinear upsample + out = F.interpolate(x, scale_factor=2, mode="bilinear", align_corners=False) + # conv + out = F.conv2d( + out, + self.weight * self.scale, + bias=self.bias, + stride=self.stride, + padding=self.padding, + ) + # activation + if self.activation is not None: + out = self.activation(out) + return out + + +class ResUpBlock(nn.Module): + """Residual block with upsampling. + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + """ + + def __init__(self, in_channels, out_channels): + super(ResUpBlock, self).__init__() + + self.conv1 = ConvLayer(in_channels, in_channels, 3, bias=True, activate=True) + self.conv2 = ConvUpLayer( + in_channels, out_channels, 3, stride=1, padding=1, bias=True, activate=True + ) + self.skip = ConvUpLayer( + in_channels, out_channels, 1, bias=False, activate=False + ) + + def forward(self, x): + out = self.conv1(x) + out = self.conv2(out) + skip = self.skip(x) + out = (out + skip) / math.sqrt(2) + return out + + +class GFPGANv1(nn.Module): + """The GFPGAN architecture: Unet + StyleGAN2 decoder with SFT. + Ref: GFP-GAN: Towards Real-World Blind Face Restoration with Generative Facial Prior. + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + channel_multiplier (int): Channel multiplier for large networks of StyleGAN2. Default: 2. + resample_kernel (list[int]): A list indicating the 1D resample kernel magnitude. A cross product will be + applied to extend the 1D resample kernel to a 2D resample kernel. Default: (1, 3, 3, 1). + decoder_load_path (str): The path to the pre-trained decoder model (usually, the StyleGAN2). Default: None. + fix_decoder (bool): Whether to fix the decoder. Default: True. + num_mlp (int): Layer number of MLP style layers. Default: 8. + lr_mlp (float): Learning rate multiplier for mlp layers. Default: 0.01.
+ input_is_latent (bool): Whether input is latent style. Default: False. + different_w (bool): Whether to use different latent w for different layers. Default: False. + narrow (float): The narrow ratio for channels. Default: 1. + sft_half (bool): Whether to apply SFT on half of the input channels. Default: False. + """ + + def __init__( + self, + out_size, + num_style_feat=512, + channel_multiplier=1, + resample_kernel=(1, 3, 3, 1), + decoder_load_path=None, + fix_decoder=True, + # for stylegan decoder + num_mlp=8, + lr_mlp=0.01, + input_is_latent=False, + different_w=False, + narrow=1, + sft_half=False, + ): + super(GFPGANv1, self).__init__() + self.input_is_latent = input_is_latent + self.different_w = different_w + self.num_style_feat = num_style_feat + + unet_narrow = narrow * 0.5 # by default, use a half of input channels + channels = { + "4": int(512 * unet_narrow), + "8": int(512 * unet_narrow), + "16": int(512 * unet_narrow), + "32": int(512 * unet_narrow), + "64": int(256 * channel_multiplier * unet_narrow), + "128": int(128 * channel_multiplier * unet_narrow), + "256": int(64 * channel_multiplier * unet_narrow), + "512": int(32 * channel_multiplier * unet_narrow), + "1024": int(16 * channel_multiplier * unet_narrow), + } + + self.log_size = int(math.log(out_size, 2)) + first_out_size = 2 ** (int(math.log(out_size, 2))) + + self.conv_body_first = ConvLayer( + 3, channels[f"{first_out_size}"], 1, bias=True, activate=True + ) + + # downsample + in_channels = channels[f"{first_out_size}"] + self.conv_body_down = nn.ModuleList() + for i in range(self.log_size, 2, -1): + out_channels = channels[f"{2**(i - 1)}"] + self.conv_body_down.append( + ResBlock(in_channels, out_channels, resample_kernel) + ) + in_channels = out_channels + + self.final_conv = ConvLayer( + in_channels, channels["4"], 3, bias=True, activate=True + ) + + # upsample + in_channels = channels["4"] + self.conv_body_up = nn.ModuleList() + for i in range(3, self.log_size + 1): + out_channels = channels[f"{2**i}"] + self.conv_body_up.append(ResUpBlock(in_channels, out_channels)) + in_channels = out_channels + + # to RGB + self.toRGB = nn.ModuleList() + for i in range(3, self.log_size + 1): + self.toRGB.append( + EqualConv2d( + channels[f"{2**i}"], + 3, + 1, + stride=1, + padding=0, + bias=True, + bias_init_val=0, + ) + ) + + if different_w: + linear_out_channel = (int(math.log(out_size, 2)) * 2 - 2) * num_style_feat + else: + linear_out_channel = num_style_feat + + self.final_linear = EqualLinear( + channels["4"] * 4 * 4, + linear_out_channel, + bias=True, + bias_init_val=0, + lr_mul=1, + activation=None, + ) + + # the decoder: stylegan2 generator with SFT modulations + self.stylegan_decoder = StyleGAN2GeneratorSFT( + out_size=out_size, + num_style_feat=num_style_feat, + num_mlp=num_mlp, + channel_multiplier=channel_multiplier, + resample_kernel=resample_kernel, + lr_mlp=lr_mlp, + narrow=narrow, + sft_half=sft_half, + ) + + # load pre-trained stylegan2 model if necessary + if decoder_load_path: + self.stylegan_decoder.load_state_dict( + torch.load( + decoder_load_path, map_location=lambda storage, loc: storage + )["params_ema"] + ) + # fix decoder without updating params + if fix_decoder: + for _, param in self.stylegan_decoder.named_parameters(): + param.requires_grad = False + + # for SFT modulations (scale and shift) + self.condition_scale = nn.ModuleList() + self.condition_shift = nn.ModuleList() + for i in range(3, self.log_size + 1): + out_channels = channels[f"{2**i}"] + if sft_half: + sft_out_channels = 
out_channels + else: + sft_out_channels = out_channels * 2 + self.condition_scale.append( + nn.Sequential( + EqualConv2d( + out_channels, + out_channels, + 3, + stride=1, + padding=1, + bias=True, + bias_init_val=0, + ), + ScaledLeakyReLU(0.2), + EqualConv2d( + out_channels, + sft_out_channels, + 3, + stride=1, + padding=1, + bias=True, + bias_init_val=1, + ), + ) + ) + self.condition_shift.append( + nn.Sequential( + EqualConv2d( + out_channels, + out_channels, + 3, + stride=1, + padding=1, + bias=True, + bias_init_val=0, + ), + ScaledLeakyReLU(0.2), + EqualConv2d( + out_channels, + sft_out_channels, + 3, + stride=1, + padding=1, + bias=True, + bias_init_val=0, + ), + ) + ) + + def forward( + self, x, return_latents=False, return_rgb=True, randomize_noise=True, **kwargs + ): + """Forward function for GFPGANv1. + Args: + x (Tensor): Input images. + return_latents (bool): Whether to return style latents. Default: False. + return_rgb (bool): Whether to return intermediate rgb images. Default: True. + randomize_noise (bool): Randomize noise, used when 'noise' is False. Default: True. + """ + conditions = [] + unet_skips = [] + out_rgbs = [] + + # encoder + feat = self.conv_body_first(x) + for i in range(self.log_size - 2): + feat = self.conv_body_down[i](feat) + unet_skips.insert(0, feat) + + feat = self.final_conv(feat) + + # style code + style_code = self.final_linear(feat.view(feat.size(0), -1)) + if self.different_w: + style_code = style_code.view(style_code.size(0), -1, self.num_style_feat) + + # decode + for i in range(self.log_size - 2): + # add unet skip + feat = feat + unet_skips[i] + # ResUpLayer + feat = self.conv_body_up[i](feat) + # generate scale and shift for SFT layers + scale = self.condition_scale[i](feat) + conditions.append(scale.clone()) + shift = self.condition_shift[i](feat) + conditions.append(shift.clone()) + # generate rgb images + if return_rgb: + out_rgbs.append(self.toRGB[i](feat)) + + # decoder + image, _ = self.stylegan_decoder( + [style_code], + conditions, + return_latents=return_latents, + input_is_latent=self.input_is_latent, + randomize_noise=randomize_noise, + ) + + return image, out_rgbs + + +class FacialComponentDiscriminator(nn.Module): + """Facial component (eyes, mouth, nose) discriminator used in GFPGAN.""" + + def __init__(self): + super(FacialComponentDiscriminator, self).__init__() + # It now uses a VGG-style architecture with fixed model size + self.conv1 = ConvLayer( + 3, + 64, + 3, + downsample=False, + resample_kernel=(1, 3, 3, 1), + bias=True, + activate=True, + ) + self.conv2 = ConvLayer( + 64, + 128, + 3, + downsample=True, + resample_kernel=(1, 3, 3, 1), + bias=True, + activate=True, + ) + self.conv3 = ConvLayer( + 128, + 128, + 3, + downsample=False, + resample_kernel=(1, 3, 3, 1), + bias=True, + activate=True, + ) + self.conv4 = ConvLayer( + 128, + 256, + 3, + downsample=True, + resample_kernel=(1, 3, 3, 1), + bias=True, + activate=True, + ) + self.conv5 = ConvLayer( + 256, + 256, + 3, + downsample=False, + resample_kernel=(1, 3, 3, 1), + bias=True, + activate=True, + ) + self.final_conv = ConvLayer(256, 1, 3, bias=True, activate=False) + + def forward(self, x, return_feats=False, **kwargs): + """Forward function for FacialComponentDiscriminator. + Args: + x (Tensor): Input images. + return_feats (bool): Whether to return intermediate features. Default: False.
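+ Example (sketch; GFPGAN typically feeds cropped eye/mouth/nose regions, so the 64x64 input here is illustrative): + d = FacialComponentDiscriminator() + score, feats = d(torch.randn(1, 3, 64, 64), return_feats=True) # score is a patch-level realness map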
+ """ + feat = self.conv1(x) + feat = self.conv3(self.conv2(feat)) + rlt_feats = [] + if return_feats: + rlt_feats.append(feat.clone()) + feat = self.conv5(self.conv4(feat)) + if return_feats: + rlt_feats.append(feat.clone()) + out = self.final_conv(feat) + + if return_feats: + return out, rlt_feats + else: + return out, None diff --git a/ldm_patched/pfn/architecture/face/gfpganv1_clean_arch.py b/ldm_patched/pfn/architecture/face/gfpganv1_clean_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..16470d6345f71ed1517ff26f65b9cd125d80d99e --- /dev/null +++ b/ldm_patched/pfn/architecture/face/gfpganv1_clean_arch.py @@ -0,0 +1,370 @@ +# pylint: skip-file +# type: ignore +import math +import random + +import torch +from torch import nn +from torch.nn import functional as F + +from .stylegan2_clean_arch import StyleGAN2GeneratorClean + + +class StyleGAN2GeneratorCSFT(StyleGAN2GeneratorClean): + """StyleGAN2 Generator with SFT modulation (Spatial Feature Transform). + It is the clean version without custom compiled CUDA extensions used in StyleGAN2. + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + num_mlp (int): Layer number of MLP style layers. Default: 8. + channel_multiplier (int): Channel multiplier for large networks of StyleGAN2. Default: 2. + narrow (float): The narrow ratio for channels. Default: 1. + sft_half (bool): Whether to apply SFT on half of the input channels. Default: False. + """ + + def __init__( + self, + out_size, + num_style_feat=512, + num_mlp=8, + channel_multiplier=2, + narrow=1, + sft_half=False, + ): + super(StyleGAN2GeneratorCSFT, self).__init__( + out_size, + num_style_feat=num_style_feat, + num_mlp=num_mlp, + channel_multiplier=channel_multiplier, + narrow=narrow, + ) + self.sft_half = sft_half + + def forward( + self, + styles, + conditions, + input_is_latent=False, + noise=None, + randomize_noise=True, + truncation=1, + truncation_latent=None, + inject_index=None, + return_latents=False, + ): + """Forward function for StyleGAN2GeneratorCSFT. + Args: + styles (list[Tensor]): Sample codes of styles. + conditions (list[Tensor]): SFT conditions to generators. + input_is_latent (bool): Whether input is latent style. Default: False. + noise (Tensor | None): Input noise or None. Default: None. + randomize_noise (bool): Randomize noise, used when 'noise' is False. Default: True. + truncation (float): The truncation ratio. Default: 1. + truncation_latent (Tensor | None): The truncation latent tensor. Default: None. + inject_index (int | None): The injection index for mixing noise. Default: None. + return_latents (bool): Whether to return style latents. Default: False. 
+ """ + # style codes -> latents with Style MLP layer + if not input_is_latent: + styles = [self.style_mlp(s) for s in styles] + # noises + if noise is None: + if randomize_noise: + noise = [None] * self.num_layers # for each style conv layer + else: # use the stored noise + noise = [ + getattr(self.noises, f"noise{i}") for i in range(self.num_layers) + ] + # style truncation + if truncation < 1: + style_truncation = [] + for style in styles: + style_truncation.append( + truncation_latent + truncation * (style - truncation_latent) + ) + styles = style_truncation + # get style latents with injection + if len(styles) == 1: + inject_index = self.num_latent + + if styles[0].ndim < 3: + # repeat latent code for all the layers + latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + else: # used for encoder with different latent code for each layer + latent = styles[0] + elif len(styles) == 2: # mixing noises + if inject_index is None: + inject_index = random.randint(1, self.num_latent - 1) + latent1 = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + latent2 = ( + styles[1].unsqueeze(1).repeat(1, self.num_latent - inject_index, 1) + ) + latent = torch.cat([latent1, latent2], 1) + + # main generation + out = self.constant_input(latent.shape[0]) + out = self.style_conv1(out, latent[:, 0], noise=noise[0]) + skip = self.to_rgb1(out, latent[:, 1]) + + i = 1 + for conv1, conv2, noise1, noise2, to_rgb in zip( + self.style_convs[::2], + self.style_convs[1::2], + noise[1::2], + noise[2::2], + self.to_rgbs, + ): + out = conv1(out, latent[:, i], noise=noise1) + + # the conditions may have fewer levels + if i < len(conditions): + # SFT part to combine the conditions + if self.sft_half: # only apply SFT to half of the channels + out_same, out_sft = torch.split(out, int(out.size(1) // 2), dim=1) + out_sft = out_sft * conditions[i - 1] + conditions[i] + out = torch.cat([out_same, out_sft], dim=1) + else: # apply SFT to all the channels + out = out * conditions[i - 1] + conditions[i] + + out = conv2(out, latent[:, i + 1], noise=noise2) + skip = to_rgb(out, latent[:, i + 2], skip) # feature back to the rgb space + i += 2 + + image = skip + + if return_latents: + return image, latent + else: + return image, None + + +class ResBlock(nn.Module): + """Residual block with bilinear upsampling/downsampling. + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + mode (str): Upsampling/downsampling mode. Options: down | up. Default: down. + """ + + def __init__(self, in_channels, out_channels, mode="down"): + super(ResBlock, self).__init__() + + self.conv1 = nn.Conv2d(in_channels, in_channels, 3, 1, 1) + self.conv2 = nn.Conv2d(in_channels, out_channels, 3, 1, 1) + self.skip = nn.Conv2d(in_channels, out_channels, 1, bias=False) + if mode == "down": + self.scale_factor = 0.5 + elif mode == "up": + self.scale_factor = 2 + + def forward(self, x): + out = F.leaky_relu_(self.conv1(x), negative_slope=0.2) + # upsample/downsample + out = F.interpolate( + out, scale_factor=self.scale_factor, mode="bilinear", align_corners=False + ) + out = F.leaky_relu_(self.conv2(out), negative_slope=0.2) + # skip + x = F.interpolate( + x, scale_factor=self.scale_factor, mode="bilinear", align_corners=False + ) + skip = self.skip(x) + out = out + skip + return out + + +class GFPGANv1Clean(nn.Module): + """The GFPGAN architecture: Unet + StyleGAN2 decoder with SFT. + It is the clean version without custom compiled CUDA extensions used in StyleGAN2. 
+ Ref: GFP-GAN: Towards Real-World Blind Face Restoration with Generative Facial Prior. + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + channel_multiplier (int): Channel multiplier for large networks of StyleGAN2. Default: 2. + decoder_load_path (str): The path to the pre-trained decoder model (usually, the StyleGAN2). Default: None. + fix_decoder (bool): Whether to fix the decoder. Default: True. + num_mlp (int): Layer number of MLP style layers. Default: 8. + input_is_latent (bool): Whether input is latent style. Default: False. + different_w (bool): Whether to use different latent w for different layers. Default: False. + narrow (float): The narrow ratio for channels. Default: 1. + sft_half (bool): Whether to apply SFT on half of the input channels. Default: False. + """ + + def __init__( + self, + state_dict, + ): + super(GFPGANv1Clean, self).__init__() + + out_size = 512 + num_style_feat = 512 + channel_multiplier = 2 + decoder_load_path = None + fix_decoder = False + num_mlp = 8 + input_is_latent = True + different_w = True + narrow = 1 + sft_half = True + + self.model_arch = "GFPGAN" + self.sub_type = "Face SR" + self.scale = 8 + self.in_nc = 3 + self.out_nc = 3 + self.state = state_dict + + self.supports_fp16 = False + self.supports_bf16 = True + self.min_size_restriction = 512 + + self.input_is_latent = input_is_latent + self.different_w = different_w + self.num_style_feat = num_style_feat + + unet_narrow = narrow * 0.5 # by default, use a half of input channels + channels = { + "4": int(512 * unet_narrow), + "8": int(512 * unet_narrow), + "16": int(512 * unet_narrow), + "32": int(512 * unet_narrow), + "64": int(256 * channel_multiplier * unet_narrow), + "128": int(128 * channel_multiplier * unet_narrow), + "256": int(64 * channel_multiplier * unet_narrow), + "512": int(32 * channel_multiplier * unet_narrow), + "1024": int(16 * channel_multiplier * unet_narrow), + } + + self.log_size = int(math.log(out_size, 2)) + first_out_size = 2 ** (int(math.log(out_size, 2))) + + self.conv_body_first = nn.Conv2d(3, channels[f"{first_out_size}"], 1) + + # downsample + in_channels = channels[f"{first_out_size}"] + self.conv_body_down = nn.ModuleList() + for i in range(self.log_size, 2, -1): + out_channels = channels[f"{2**(i - 1)}"] + self.conv_body_down.append(ResBlock(in_channels, out_channels, mode="down")) + in_channels = out_channels + + self.final_conv = nn.Conv2d(in_channels, channels["4"], 3, 1, 1) + + # upsample + in_channels = channels["4"] + self.conv_body_up = nn.ModuleList() + for i in range(3, self.log_size + 1): + out_channels = channels[f"{2**i}"] + self.conv_body_up.append(ResBlock(in_channels, out_channels, mode="up")) + in_channels = out_channels + + # to RGB + self.toRGB = nn.ModuleList() + for i in range(3, self.log_size + 1): + self.toRGB.append(nn.Conv2d(channels[f"{2**i}"], 3, 1)) + + if different_w: + linear_out_channel = (int(math.log(out_size, 2)) * 2 - 2) * num_style_feat + else: + linear_out_channel = num_style_feat + + self.final_linear = nn.Linear(channels["4"] * 4 * 4, linear_out_channel) + + # the decoder: stylegan2 generator with SFT modulations + self.stylegan_decoder = StyleGAN2GeneratorCSFT( + out_size=out_size, + num_style_feat=num_style_feat, + num_mlp=num_mlp, + channel_multiplier=channel_multiplier, + narrow=narrow, + sft_half=sft_half, + ) + + # load pre-trained stylegan2 model if necessary + if decoder_load_path: + self.stylegan_decoder.load_state_dict( + torch.load( + 
decoder_load_path, map_location=lambda storage, loc: storage + )["params_ema"] + ) + # fix decoder without updating params + if fix_decoder: + for _, param in self.stylegan_decoder.named_parameters(): + param.requires_grad = False + + # for SFT modulations (scale and shift) + self.condition_scale = nn.ModuleList() + self.condition_shift = nn.ModuleList() + for i in range(3, self.log_size + 1): + out_channels = channels[f"{2**i}"] + if sft_half: + sft_out_channels = out_channels + else: + sft_out_channels = out_channels * 2 + self.condition_scale.append( + nn.Sequential( + nn.Conv2d(out_channels, out_channels, 3, 1, 1), + nn.LeakyReLU(0.2, True), + nn.Conv2d(out_channels, sft_out_channels, 3, 1, 1), + ) + ) + self.condition_shift.append( + nn.Sequential( + nn.Conv2d(out_channels, out_channels, 3, 1, 1), + nn.LeakyReLU(0.2, True), + nn.Conv2d(out_channels, sft_out_channels, 3, 1, 1), + ) + ) + self.load_state_dict(state_dict) + + def forward( + self, x, return_latents=False, return_rgb=True, randomize_noise=True, **kwargs + ): + """Forward function for GFPGANv1Clean. + Args: + x (Tensor): Input images. + return_latents (bool): Whether to return style latents. Default: False. + return_rgb (bool): Whether return intermediate rgb images. Default: True. + randomize_noise (bool): Randomize noise, used when 'noise' is False. Default: True. + """ + conditions = [] + unet_skips = [] + out_rgbs = [] + + # encoder + feat = F.leaky_relu_(self.conv_body_first(x), negative_slope=0.2) + for i in range(self.log_size - 2): + feat = self.conv_body_down[i](feat) + unet_skips.insert(0, feat) + feat = F.leaky_relu_(self.final_conv(feat), negative_slope=0.2) + + # style code + style_code = self.final_linear(feat.view(feat.size(0), -1)) + if self.different_w: + style_code = style_code.view(style_code.size(0), -1, self.num_style_feat) + + # decode + for i in range(self.log_size - 2): + # add unet skip + feat = feat + unet_skips[i] + # ResUpLayer + feat = self.conv_body_up[i](feat) + # generate scale and shift for SFT layers + scale = self.condition_scale[i](feat) + conditions.append(scale.clone()) + shift = self.condition_shift[i](feat) + conditions.append(shift.clone()) + # generate rgb images + if return_rgb: + out_rgbs.append(self.toRGB[i](feat)) + + # decoder + image, _ = self.stylegan_decoder( + [style_code], + conditions, + return_latents=return_latents, + input_is_latent=self.input_is_latent, + randomize_noise=randomize_noise, + ) + + return image, out_rgbs diff --git a/ldm_patched/pfn/architecture/face/restoreformer_arch.py b/ldm_patched/pfn/architecture/face/restoreformer_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..4492260291d6d74b2c0d38130f7aa8b50ba2fc11 --- /dev/null +++ b/ldm_patched/pfn/architecture/face/restoreformer_arch.py @@ -0,0 +1,776 @@ +# pylint: skip-file +# type: ignore +"""Modified from https://github.com/wzhouxiff/RestoreFormer +""" +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F + + +class VectorQuantizer(nn.Module): + """ + see https://github.com/MishaLaskin/vqvae/blob/d761a999e2267766400dc646d82d3ac3657771d4/models/quantizer.py + ____________________________________________ + Discretization bottleneck part of the VQ-VAE. 
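+ Each continuous encoder vector is snapped to its nearest codebook entry; a straight-through estimator (z_q = z + (z_q - z).detach()) keeps the lookup differentiable.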
+ Inputs: + - n_e : number of embeddings + - e_dim : dimension of embedding + - beta : commitment cost used in loss term, beta * ||z_e(x)-sg[e]||^2 + _____________________________________________ + """ + + def __init__(self, n_e, e_dim, beta): + super(VectorQuantizer, self).__init__() + self.n_e = n_e + self.e_dim = e_dim + self.beta = beta + + self.embedding = nn.Embedding(self.n_e, self.e_dim) + self.embedding.weight.data.uniform_(-1.0 / self.n_e, 1.0 / self.n_e) + + def forward(self, z): + """ + Inputs the output of the encoder network z and maps it to a discrete + one-hot vector that is the index of the closest embedding vector e_j + z (continuous) -> z_q (discrete) + z.shape = (batch, channel, height, width) + quantization pipeline: + 1. get encoder input (B,C,H,W) + 2. flatten input to (B*H*W,C) + """ + # reshape z -> (batch, height, width, channel) and flatten + z = z.permute(0, 2, 3, 1).contiguous() + z_flattened = z.view(-1, self.e_dim) + # distances from z to embeddings e_j: (z - e)^2 = z^2 + e^2 - 2 e * z + + d = ( + torch.sum(z_flattened**2, dim=1, keepdim=True) + + torch.sum(self.embedding.weight**2, dim=1) + - 2 * torch.matmul(z_flattened, self.embedding.weight.t()) + ) + + # could possibly replace this here + # #\start... + # find closest encodings + + min_value, min_encoding_indices = torch.min(d, dim=1) + + min_encoding_indices = min_encoding_indices.unsqueeze(1) + + min_encodings = torch.zeros(min_encoding_indices.shape[0], self.n_e).to(z) + min_encodings.scatter_(1, min_encoding_indices, 1) + + # dtype min encodings: torch.float32 + # min_encodings shape: torch.Size([2048, 512]) + # min_encoding_indices.shape: torch.Size([2048, 1]) + + # get quantized latent vectors + z_q = torch.matmul(min_encodings, self.embedding.weight).view(z.shape) + # .........\end + + # with: + # .........\start + # min_encoding_indices = torch.argmin(d, dim=1) + # z_q = self.embedding(min_encoding_indices) + # ......\end.........
(TODO) + + # compute loss for embedding + loss = torch.mean((z_q.detach() - z) ** 2) + self.beta * torch.mean( + (z_q - z.detach()) ** 2 + ) + + # preserve gradients + z_q = z + (z_q - z).detach() + + # perplexity + + e_mean = torch.mean(min_encodings, dim=0) + perplexity = torch.exp(-torch.sum(e_mean * torch.log(e_mean + 1e-10))) + + # reshape back to match original input shape + z_q = z_q.permute(0, 3, 1, 2).contiguous() + + return z_q, loss, (perplexity, min_encodings, min_encoding_indices, d) + + def get_codebook_entry(self, indices, shape): + # shape specifying (batch, height, width, channel) + # TODO: check for more easy handling with nn.Embedding + min_encodings = torch.zeros(indices.shape[0], self.n_e).to(indices) + min_encodings.scatter_(1, indices[:, None], 1) + + # get quantized latent vectors + z_q = torch.matmul(min_encodings.float(), self.embedding.weight) + + if shape is not None: + z_q = z_q.view(shape) + + # reshape back to match original input shape + z_q = z_q.permute(0, 3, 1, 2).contiguous() + + return z_q + + +# pytorch_diffusion + derived encoder decoder +def nonlinearity(x): + # swish + return x * torch.sigmoid(x) + + +def Normalize(in_channels): + return torch.nn.GroupNorm( + num_groups=32, num_channels=in_channels, eps=1e-6, affine=True + ) + + +class Upsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + self.conv = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=3, stride=1, padding=1 + ) + + def forward(self, x): + x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode="nearest") + if self.with_conv: + x = self.conv(x) + return x + + +class Downsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + # no asymmetric padding in torch conv, must do it ourselves + self.conv = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=3, stride=2, padding=0 + ) + + def forward(self, x): + if self.with_conv: + pad = (0, 1, 0, 1) + x = torch.nn.functional.pad(x, pad, mode="constant", value=0) + x = self.conv(x) + else: + x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2) + return x + + +class ResnetBlock(nn.Module): + def __init__( + self, + *, + in_channels, + out_channels=None, + conv_shortcut=False, + dropout, + temb_channels=512 + ): + super().__init__() + self.in_channels = in_channels + out_channels = in_channels if out_channels is None else out_channels + self.out_channels = out_channels + self.use_conv_shortcut = conv_shortcut + + self.norm1 = Normalize(in_channels) + self.conv1 = torch.nn.Conv2d( + in_channels, out_channels, kernel_size=3, stride=1, padding=1 + ) + if temb_channels > 0: + self.temb_proj = torch.nn.Linear(temb_channels, out_channels) + self.norm2 = Normalize(out_channels) + self.dropout = torch.nn.Dropout(dropout) + self.conv2 = torch.nn.Conv2d( + out_channels, out_channels, kernel_size=3, stride=1, padding=1 + ) + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + self.conv_shortcut = torch.nn.Conv2d( + in_channels, out_channels, kernel_size=3, stride=1, padding=1 + ) + else: + self.nin_shortcut = torch.nn.Conv2d( + in_channels, out_channels, kernel_size=1, stride=1, padding=0 + ) + + def forward(self, x, temb): + h = x + h = self.norm1(h) + h = nonlinearity(h) + h = self.conv1(h) + + if temb is not None: + h = h + self.temb_proj(nonlinearity(temb))[:, :, None, None] + + h = self.norm2(h) + h = nonlinearity(h) + h = self.dropout(h) 
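+ # second conv after the norm2 -> swish -> dropout pre-activation sequence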
+ h = self.conv2(h) + + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + x = self.conv_shortcut(x) + else: + x = self.nin_shortcut(x) + + return x + h + + +class MultiHeadAttnBlock(nn.Module): + def __init__(self, in_channels, head_size=1): + super().__init__() + self.in_channels = in_channels + self.head_size = head_size + self.att_size = in_channels // head_size + assert ( + in_channels % head_size == 0 + ), "in_channels must be divisible by head_size." + + self.norm1 = Normalize(in_channels) + self.norm2 = Normalize(in_channels) + + self.q = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.k = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.v = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.proj_out = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.num = 0 + + def forward(self, x, y=None): + h_ = x + h_ = self.norm1(h_) + if y is None: + y = h_ + else: + y = self.norm2(y) + + q = self.q(y) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b, c, h, w = q.shape + q = q.reshape(b, self.head_size, self.att_size, h * w) + q = q.permute(0, 3, 1, 2) # b, hw, head, att + + k = k.reshape(b, self.head_size, self.att_size, h * w) + k = k.permute(0, 3, 1, 2) + + v = v.reshape(b, self.head_size, self.att_size, h * w) + v = v.permute(0, 3, 1, 2) + + q = q.transpose(1, 2) + v = v.transpose(1, 2) + k = k.transpose(1, 2).transpose(2, 3) + + scale = int(self.att_size) ** (-0.5) + q.mul_(scale) + w_ = torch.matmul(q, k) + w_ = F.softmax(w_, dim=3) + + w_ = w_.matmul(v) + + w_ = w_.transpose(1, 2).contiguous() # [b, h*w, head, att] + w_ = w_.view(b, h, w, -1) + w_ = w_.permute(0, 3, 1, 2) + + w_ = self.proj_out(w_) + + return x + w_ + + +class MultiHeadEncoder(nn.Module): + def __init__( + self, + ch, + out_ch, + ch_mult=(1, 2, 4, 8), + num_res_blocks=2, + attn_resolutions=(16,), + dropout=0.0, + resamp_with_conv=True, + in_channels=3, + resolution=512, + z_channels=256, + double_z=True, + enable_mid=True, + head_size=1, + **ignore_kwargs + ): + super().__init__() + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.enable_mid = enable_mid + + # downsampling + self.conv_in = torch.nn.Conv2d( + in_channels, self.ch, kernel_size=3, stride=1, padding=1 + ) + + curr_res = resolution + in_ch_mult = (1,) + tuple(ch_mult) + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch * in_ch_mult[i_level] + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append( + ResnetBlock( + in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(MultiHeadAttnBlock(block_in, head_size)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions - 1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + if self.enable_mid: + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + self.mid.attn_1 = MultiHeadAttnBlock(block_in,
head_size) + self.mid.block_2 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d( + block_in, + 2 * z_channels if double_z else z_channels, + kernel_size=3, + stride=1, + padding=1, + ) + + def forward(self, x): + hs = {} + # timestep embedding + temb = None + + # downsampling + h = self.conv_in(x) + hs["in"] = h + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](h, temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + + if i_level != self.num_resolutions - 1: + # hs.append(h) + hs["block_" + str(i_level)] = h + h = self.down[i_level].downsample(h) + + # middle + # h = hs[-1] + if self.enable_mid: + h = self.mid.block_1(h, temb) + hs["block_" + str(i_level) + "_atten"] = h + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + hs["mid_atten"] = h + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + # hs.append(h) + hs["out"] = h + + return hs + + +class MultiHeadDecoder(nn.Module): + def __init__( + self, + ch, + out_ch, + ch_mult=(1, 2, 4, 8), + num_res_blocks=2, + attn_resolutions=(16,), + dropout=0.0, + resamp_with_conv=True, + in_channels=3, + resolution=512, + z_channels=256, + give_pre_end=False, + enable_mid=True, + head_size=1, + **ignorekwargs + ): + super().__init__() + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.give_pre_end = give_pre_end + self.enable_mid = enable_mid + + # compute in_ch_mult, block_in and curr_res at lowest res + block_in = ch * ch_mult[self.num_resolutions - 1] + curr_res = resolution // 2 ** (self.num_resolutions - 1) + self.z_shape = (1, z_channels, curr_res, curr_res) + print( + "Working with z of shape {} = {} dimensions.".format( + self.z_shape, np.prod(self.z_shape) + ) + ) + + # z to block_in + self.conv_in = torch.nn.Conv2d( + z_channels, block_in, kernel_size=3, stride=1, padding=1 + ) + + # middle + if self.enable_mid: + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + self.mid.attn_1 = MultiHeadAttnBlock(block_in, head_size) + self.mid.block_2 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks + 1): + block.append( + ResnetBlock( + in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(MultiHeadAttnBlock(block_in, head_size)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d( + block_in, out_ch, kernel_size=3, stride=1, padding=1 + ) + + def forward(self, z): + # assert z.shape[1:] == self.z_shape[1:] + self.last_z_shape = z.shape + + # timestep embedding + temb = 
None + + # z to block_in + h = self.conv_in(z) + + # middle + if self.enable_mid: + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks + 1): + h = self.up[i_level].block[i_block](h, temb) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + if self.give_pre_end: + return h + + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class MultiHeadDecoderTransformer(nn.Module): + def __init__( + self, + ch, + out_ch, + ch_mult=(1, 2, 4, 8), + num_res_blocks=2, + attn_resolutions=(16,), + dropout=0.0, + resamp_with_conv=True, + in_channels=3, + resolution=512, + z_channels=256, + give_pre_end=False, + enable_mid=True, + head_size=1, + **ignorekwargs + ): + super().__init__() + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.give_pre_end = give_pre_end + self.enable_mid = enable_mid + + # compute in_ch_mult, block_in and curr_res at lowest res + block_in = ch * ch_mult[self.num_resolutions - 1] + curr_res = resolution // 2 ** (self.num_resolutions - 1) + self.z_shape = (1, z_channels, curr_res, curr_res) + print( + "Working with z of shape {} = {} dimensions.".format( + self.z_shape, np.prod(self.z_shape) + ) + ) + + # z to block_in + self.conv_in = torch.nn.Conv2d( + z_channels, block_in, kernel_size=3, stride=1, padding=1 + ) + + # middle + if self.enable_mid: + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + self.mid.attn_1 = MultiHeadAttnBlock(block_in, head_size) + self.mid.block_2 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks + 1): + block.append( + ResnetBlock( + in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(MultiHeadAttnBlock(block_in, head_size)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d( + block_in, out_ch, kernel_size=3, stride=1, padding=1 + ) + + def forward(self, z, hs): + # assert z.shape[1:] == self.z_shape[1:] + # self.last_z_shape = z.shape + + # timestep embedding + temb = None + + # z to block_in + h = self.conv_in(z) + + # middle + if self.enable_mid: + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h, hs["mid_atten"]) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks + 1): + h = self.up[i_level].block[i_block](h, temb) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block]( + h, hs["block_" + str(i_level) + "_atten"] + ) + # hfeature = h.clone() + if i_level != 0: + h = 
self.up[i_level].upsample(h) + + # end + if self.give_pre_end: + return h + + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class RestoreFormer(nn.Module): + def __init__( + self, + state_dict, + ): + super(RestoreFormer, self).__init__() + + n_embed = 1024 + embed_dim = 256 + ch = 64 + out_ch = 3 + ch_mult = (1, 2, 2, 4, 4, 8) + num_res_blocks = 2 + attn_resolutions = (16,) + dropout = 0.0 + in_channels = 3 + resolution = 512 + z_channels = 256 + double_z = False + enable_mid = True + fix_decoder = False + fix_codebook = True + fix_encoder = False + head_size = 8 + + self.model_arch = "RestoreFormer" + self.sub_type = "Face SR" + self.scale = 8 + self.in_nc = 3 + self.out_nc = out_ch + self.state = state_dict + + self.supports_fp16 = False + self.supports_bf16 = True + self.min_size_restriction = 16 + + self.encoder = MultiHeadEncoder( + ch=ch, + out_ch=out_ch, + ch_mult=ch_mult, + num_res_blocks=num_res_blocks, + attn_resolutions=attn_resolutions, + dropout=dropout, + in_channels=in_channels, + resolution=resolution, + z_channels=z_channels, + double_z=double_z, + enable_mid=enable_mid, + head_size=head_size, + ) + self.decoder = MultiHeadDecoderTransformer( + ch=ch, + out_ch=out_ch, + ch_mult=ch_mult, + num_res_blocks=num_res_blocks, + attn_resolutions=attn_resolutions, + dropout=dropout, + in_channels=in_channels, + resolution=resolution, + z_channels=z_channels, + enable_mid=enable_mid, + head_size=head_size, + ) + + self.quantize = VectorQuantizer(n_embed, embed_dim, beta=0.25) + + self.quant_conv = torch.nn.Conv2d(z_channels, embed_dim, 1) + self.post_quant_conv = torch.nn.Conv2d(embed_dim, z_channels, 1) + + if fix_decoder: + for _, param in self.decoder.named_parameters(): + param.requires_grad = False + for _, param in self.post_quant_conv.named_parameters(): + param.requires_grad = False + for _, param in self.quantize.named_parameters(): + param.requires_grad = False + elif fix_codebook: + for _, param in self.quantize.named_parameters(): + param.requires_grad = False + + if fix_encoder: + for _, param in self.encoder.named_parameters(): + param.requires_grad = False + + self.load_state_dict(state_dict) + + def encode(self, x): + hs = self.encoder(x) + h = self.quant_conv(hs["out"]) + quant, emb_loss, info = self.quantize(h) + return quant, emb_loss, info, hs + + def decode(self, quant, hs): + quant = self.post_quant_conv(quant) + dec = self.decoder(quant, hs) + + return dec + + def forward(self, input, **kwargs): + quant, diff, info, hs = self.encode(input) + dec = self.decode(quant, hs) + + return dec, None diff --git a/ldm_patched/pfn/architecture/face/stylegan2_arch.py b/ldm_patched/pfn/architecture/face/stylegan2_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..1eb0e9f15f706e2b9759bde4d0244d424c3ae76f --- /dev/null +++ b/ldm_patched/pfn/architecture/face/stylegan2_arch.py @@ -0,0 +1,865 @@ +# pylint: skip-file +# type: ignore +import math +import random + +import torch +from torch import nn +from torch.nn import functional as F + +from .fused_act import FusedLeakyReLU, fused_leaky_relu +from .upfirdn2d import upfirdn2d + + +class NormStyleCode(nn.Module): + def forward(self, x): + """Normalize the style codes. + + Args: + x (Tensor): Style codes with shape (b, c). + + Returns: + Tensor: Normalized tensor. + """ + return x * torch.rsqrt(torch.mean(x**2, dim=1, keepdim=True) + 1e-8) + + +def make_resample_kernel(k): + """Make resampling kernel for UpFirDn. 
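The RestoreFormer class above is the whole pipeline: MultiHeadEncoder returns a dict of intermediate features ('in', 'block_i', 'block_i_atten', 'mid_atten', 'out'), the 'out' tensor passes through quant_conv into the learned codebook, and MultiHeadDecoderTransformer cross-attends to the stored encoder features while upsampling back to the input size. A usage sketch; the checkpoint path is hypothetical and the on-disk format may need unwrapping (e.g. a nested 'state_dict' key) depending on where the weights come from:

    import torch

    state = torch.load("RestoreFormer.ckpt", map_location="cpu")  # hypothetical path
    model = RestoreFormer(state).eval()

    face = torch.randn(1, 3, 512, 512)       # aligned 512x512 face crop
    with torch.no_grad():
        quant, emb_loss, info, hs = model.encode(face)
        print(sorted(hs.keys()))             # 'block_0'..'block_4', 'block_5_atten', 'in', 'mid_atten', 'out'
        restored = model.decode(quant, hs)   # equivalently: restored, _ = model(face)
    print(restored.shape)                    # torch.Size([1, 3, 512, 512])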
+ + Args: + k (list[int]): A list indicating the 1D resample kernel magnitude. + + Returns: + Tensor: 2D resampled kernel. + """ + k = torch.tensor(k, dtype=torch.float32) + if k.ndim == 1: + k = k[None, :] * k[:, None] # to 2D kernel, outer product + # normalize + k /= k.sum() + return k + + +class UpFirDnUpsample(nn.Module): + """Upsample, FIR filter, and downsample (upsample version). + + References: + 1. https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.upfirdn.html # noqa: E501 + 2. http://www.ece.northwestern.edu/local-apps/matlabhelp/toolbox/signal/upfirdn.html # noqa: E501 + + Args: + resample_kernel (list[int]): A list indicating the 1D resample kernel + magnitude. + factor (int): Upsampling scale factor. Default: 2. + """ + + def __init__(self, resample_kernel, factor=2): + super(UpFirDnUpsample, self).__init__() + self.kernel = make_resample_kernel(resample_kernel) * (factor**2) + self.factor = factor + + pad = self.kernel.shape[0] - factor + self.pad = ((pad + 1) // 2 + factor - 1, pad // 2) + + def forward(self, x): + out = upfirdn2d(x, self.kernel.type_as(x), up=self.factor, down=1, pad=self.pad) + return out + + def __repr__(self): + return f"{self.__class__.__name__}(factor={self.factor})" + + +class UpFirDnDownsample(nn.Module): + """Upsample, FIR filter, and downsample (downsample version). + + Args: + resample_kernel (list[int]): A list indicating the 1D resample kernel + magnitude. + factor (int): Downsampling scale factor. Default: 2. + """ + + def __init__(self, resample_kernel, factor=2): + super(UpFirDnDownsample, self).__init__() + self.kernel = make_resample_kernel(resample_kernel) + self.factor = factor + + pad = self.kernel.shape[0] - factor + self.pad = ((pad + 1) // 2, pad // 2) + + def forward(self, x): + out = upfirdn2d(x, self.kernel.type_as(x), up=1, down=self.factor, pad=self.pad) + return out + + def __repr__(self): + return f"{self.__class__.__name__}(factor={self.factor})" + + +class UpFirDnSmooth(nn.Module): + """Upsample, FIR filter, and downsample (smooth version). + + Args: + resample_kernel (list[int]): A list indicating the 1D resample kernel + magnitude. + upsample_factor (int): Upsampling scale factor. Default: 1. + downsample_factor (int): Downsampling scale factor. Default: 1. + kernel_size (int): Kernel size. Default: 1. + """ + + def __init__( + self, resample_kernel, upsample_factor=1, downsample_factor=1, kernel_size=1 + ): + super(UpFirDnSmooth, self).__init__() + self.upsample_factor = upsample_factor + self.downsample_factor = downsample_factor + self.kernel = make_resample_kernel(resample_kernel) + if upsample_factor > 1: + self.kernel = self.kernel * (upsample_factor**2) + + if upsample_factor > 1: + pad = (self.kernel.shape[0] - upsample_factor) - (kernel_size - 1) + self.pad = ((pad + 1) // 2 + upsample_factor - 1, pad // 2 + 1) + elif downsample_factor > 1: + pad = (self.kernel.shape[0] - downsample_factor) + (kernel_size - 1) + self.pad = ((pad + 1) // 2, pad // 2) + else: + raise NotImplementedError + + def forward(self, x): + out = upfirdn2d(x, self.kernel.type_as(x), up=1, down=1, pad=self.pad) + return out + + def __repr__(self): + return ( + f"{self.__class__.__name__}(upsample_factor={self.upsample_factor}" + f", downsample_factor={self.downsample_factor})" + ) + + +class EqualLinear(nn.Module): + """Equalized Linear as StyleGAN2. + + Args: + in_channels (int): Size of each sample. + out_channels (int): Size of each output sample.
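make_resample_kernel above builds the 2D FIR filter as the outer product of the 1D taps and normalizes it to sum to 1; for the default (1, 3, 3, 1) taps this is the 4x4 binomial smoothing kernel (total weight 64 before normalization). A quick check:

    import torch

    k = torch.tensor([1.0, 3.0, 3.0, 1.0])
    k2d = k[None, :] * k[:, None]    # outer product -> 4x4 kernel
    k2d = k2d / k2d.sum()            # normalize; the sum was 64
    print((k2d * 64).int())          # rows: [1,3,3,1], [3,9,9,3], [3,9,9,3], [1,3,3,1]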
+ bias (bool): If set to ``False``, the layer will not learn an additive + bias. Default: ``True``. + bias_init_val (float): Bias initialized value. Default: 0. + lr_mul (float): Learning rate multiplier. Default: 1. + activation (None | str): The activation after ``linear`` operation. + Supported: 'fused_lrelu', None. Default: None. + """ + + def __init__( + self, + in_channels, + out_channels, + bias=True, + bias_init_val=0, + lr_mul=1, + activation=None, + ): + super(EqualLinear, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.lr_mul = lr_mul + self.activation = activation + if self.activation not in ["fused_lrelu", None]: + raise ValueError( + f"Wrong activation value in EqualLinear: {activation}" + "Supported ones are: ['fused_lrelu', None]." + ) + self.scale = (1 / math.sqrt(in_channels)) * lr_mul + + self.weight = nn.Parameter(torch.randn(out_channels, in_channels).div_(lr_mul)) + if bias: + self.bias = nn.Parameter(torch.zeros(out_channels).fill_(bias_init_val)) + else: + self.register_parameter("bias", None) + + def forward(self, x): + if self.bias is None: + bias = None + else: + bias = self.bias * self.lr_mul + if self.activation == "fused_lrelu": + out = F.linear(x, self.weight * self.scale) + out = fused_leaky_relu(out, bias) + else: + out = F.linear(x, self.weight * self.scale, bias=bias) + return out + + def __repr__(self): + return ( + f"{self.__class__.__name__}(in_channels={self.in_channels}, " + f"out_channels={self.out_channels}, bias={self.bias is not None})" + ) + + +class ModulatedConv2d(nn.Module): + """Modulated Conv2d used in StyleGAN2. + + There is no bias in ModulatedConv2d. + + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + kernel_size (int): Size of the convolving kernel. + num_style_feat (int): Channel number of style features. + demodulate (bool): Whether to demodulate in the conv layer. + Default: True. + sample_mode (str | None): Indicating 'upsample', 'downsample' or None. + Default: None. + resample_kernel (list[int]): A list indicating the 1D resample kernel + magnitude. Default: (1, 3, 3, 1). + eps (float): A value added to the denominator for numerical stability. + Default: 1e-8. + """ + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + num_style_feat, + demodulate=True, + sample_mode=None, + resample_kernel=(1, 3, 3, 1), + eps=1e-8, + ): + super(ModulatedConv2d, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = kernel_size + self.demodulate = demodulate + self.sample_mode = sample_mode + self.eps = eps + + if self.sample_mode == "upsample": + self.smooth = UpFirDnSmooth( + resample_kernel, + upsample_factor=2, + downsample_factor=1, + kernel_size=kernel_size, + ) + elif self.sample_mode == "downsample": + self.smooth = UpFirDnSmooth( + resample_kernel, + upsample_factor=1, + downsample_factor=2, + kernel_size=kernel_size, + ) + elif self.sample_mode is None: + pass + else: + raise ValueError( + f"Wrong sample mode {self.sample_mode}, " + "supported ones are ['upsample', 'downsample', None]." 
+ ) + + self.scale = 1 / math.sqrt(in_channels * kernel_size**2) + # modulation inside each modulated conv + self.modulation = EqualLinear( + num_style_feat, + in_channels, + bias=True, + bias_init_val=1, + lr_mul=1, + activation=None, + ) + + self.weight = nn.Parameter( + torch.randn(1, out_channels, in_channels, kernel_size, kernel_size) + ) + self.padding = kernel_size // 2 + + def forward(self, x, style): + """Forward function. + + Args: + x (Tensor): Tensor with shape (b, c, h, w). + style (Tensor): Tensor with shape (b, num_style_feat). + + Returns: + Tensor: Modulated tensor after convolution. + """ + b, c, h, w = x.shape # c = c_in + # weight modulation + style = self.modulation(style).view(b, 1, c, 1, 1) + # self.weight: (1, c_out, c_in, k, k); style: (b, 1, c, 1, 1) + weight = self.scale * self.weight * style # (b, c_out, c_in, k, k) + + if self.demodulate: + demod = torch.rsqrt(weight.pow(2).sum([2, 3, 4]) + self.eps) + weight = weight * demod.view(b, self.out_channels, 1, 1, 1) + + weight = weight.view( + b * self.out_channels, c, self.kernel_size, self.kernel_size + ) + + if self.sample_mode == "upsample": + x = x.view(1, b * c, h, w) + weight = weight.view( + b, self.out_channels, c, self.kernel_size, self.kernel_size + ) + weight = weight.transpose(1, 2).reshape( + b * c, self.out_channels, self.kernel_size, self.kernel_size + ) + out = F.conv_transpose2d(x, weight, padding=0, stride=2, groups=b) + out = out.view(b, self.out_channels, *out.shape[2:4]) + out = self.smooth(out) + elif self.sample_mode == "downsample": + x = self.smooth(x) + x = x.view(1, b * c, *x.shape[2:4]) + out = F.conv2d(x, weight, padding=0, stride=2, groups=b) + out = out.view(b, self.out_channels, *out.shape[2:4]) + else: + x = x.view(1, b * c, h, w) + # weight: (b*c_out, c_in, k, k), groups=b + out = F.conv2d(x, weight, padding=self.padding, groups=b) + out = out.view(b, self.out_channels, *out.shape[2:4]) + + return out + + def __repr__(self): + return ( + f"{self.__class__.__name__}(in_channels={self.in_channels}, " + f"out_channels={self.out_channels}, " + f"kernel_size={self.kernel_size}, " + f"demodulate={self.demodulate}, sample_mode={self.sample_mode})" + ) + + +class StyleConv(nn.Module): + """Style conv. + + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + kernel_size (int): Size of the convolving kernel. + num_style_feat (int): Channel number of style features. + demodulate (bool): Whether demodulate in the conv layer. Default: True. + sample_mode (str | None): Indicating 'upsample', 'downsample' or None. + Default: None. + resample_kernel (list[int]): A list indicating the 1D resample kernel + magnitude. Default: (1, 3, 3, 1). 
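Two details of ModulatedConv2d.forward above are worth tracing: demodulation rescales each per-sample output filter back to (approximately) unit norm, and the batch is folded into the channel axis so a single grouped conv applies a different weight set to every sample. A sketch of the demodulation step with illustrative sizes:

    import torch

    b, c_in, c_out, k = 2, 8, 16, 3
    weight = torch.randn(1, c_out, c_in, k, k)   # shared base weight
    style = torch.randn(b, 1, c_in, 1, 1)        # per-sample modulation factors

    w = weight * style                           # (b, c_out, c_in, k, k)
    demod = torch.rsqrt(w.pow(2).sum([2, 3, 4]) + 1e-8)
    w = w * demod.view(b, c_out, 1, 1, 1)
    print(w.pow(2).sum([2, 3, 4]).sqrt())        # every output filter norm ~= 1.0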
+ """ + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + num_style_feat, + demodulate=True, + sample_mode=None, + resample_kernel=(1, 3, 3, 1), + ): + super(StyleConv, self).__init__() + self.modulated_conv = ModulatedConv2d( + in_channels, + out_channels, + kernel_size, + num_style_feat, + demodulate=demodulate, + sample_mode=sample_mode, + resample_kernel=resample_kernel, + ) + self.weight = nn.Parameter(torch.zeros(1)) # for noise injection + self.activate = FusedLeakyReLU(out_channels) + + def forward(self, x, style, noise=None): + # modulate + out = self.modulated_conv(x, style) + # noise injection + if noise is None: + b, _, h, w = out.shape + noise = out.new_empty(b, 1, h, w).normal_() + out = out + self.weight * noise + # activation (with bias) + out = self.activate(out) + return out + + +class ToRGB(nn.Module): + """To RGB from features. + + Args: + in_channels (int): Channel number of input. + num_style_feat (int): Channel number of style features. + upsample (bool): Whether to upsample. Default: True. + resample_kernel (list[int]): A list indicating the 1D resample kernel + magnitude. Default: (1, 3, 3, 1). + """ + + def __init__( + self, in_channels, num_style_feat, upsample=True, resample_kernel=(1, 3, 3, 1) + ): + super(ToRGB, self).__init__() + if upsample: + self.upsample = UpFirDnUpsample(resample_kernel, factor=2) + else: + self.upsample = None + self.modulated_conv = ModulatedConv2d( + in_channels, + 3, + kernel_size=1, + num_style_feat=num_style_feat, + demodulate=False, + sample_mode=None, + ) + self.bias = nn.Parameter(torch.zeros(1, 3, 1, 1)) + + def forward(self, x, style, skip=None): + """Forward function. + + Args: + x (Tensor): Feature tensor with shape (b, c, h, w). + style (Tensor): Tensor with shape (b, num_style_feat). + skip (Tensor): Base/skip tensor. Default: None. + + Returns: + Tensor: RGB images. + """ + out = self.modulated_conv(x, style) + out = out + self.bias + if skip is not None: + if self.upsample: + skip = self.upsample(skip) + out = out + skip + return out + + +class ConstantInput(nn.Module): + """Constant input. + + Args: + num_channel (int): Channel number of constant input. + size (int): Spatial size of constant input. + """ + + def __init__(self, num_channel, size): + super(ConstantInput, self).__init__() + self.weight = nn.Parameter(torch.randn(1, num_channel, size, size)) + + def forward(self, batch): + out = self.weight.repeat(batch, 1, 1, 1) + return out + + +class StyleGAN2Generator(nn.Module): + """StyleGAN2 Generator. + + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + num_mlp (int): Layer number of MLP style layers. Default: 8. + channel_multiplier (int): Channel multiplier for large networks of + StyleGAN2. Default: 2. + resample_kernel (list[int]): A list indicating the 1D resample kernel + magnitude. A cross production will be applied to extent 1D resample + kernel to 2D resample kernel. Default: (1, 3, 3, 1). + lr_mlp (float): Learning rate multiplier for mlp layers. Default: 0.01. + narrow (float): Narrow ratio for channels. Default: 1.0. 
+ """ + + def __init__( + self, + out_size, + num_style_feat=512, + num_mlp=8, + channel_multiplier=2, + resample_kernel=(1, 3, 3, 1), + lr_mlp=0.01, + narrow=1, + ): + super(StyleGAN2Generator, self).__init__() + # Style MLP layers + self.num_style_feat = num_style_feat + style_mlp_layers = [NormStyleCode()] + for i in range(num_mlp): + style_mlp_layers.append( + EqualLinear( + num_style_feat, + num_style_feat, + bias=True, + bias_init_val=0, + lr_mul=lr_mlp, + activation="fused_lrelu", + ) + ) + self.style_mlp = nn.Sequential(*style_mlp_layers) + + channels = { + "4": int(512 * narrow), + "8": int(512 * narrow), + "16": int(512 * narrow), + "32": int(512 * narrow), + "64": int(256 * channel_multiplier * narrow), + "128": int(128 * channel_multiplier * narrow), + "256": int(64 * channel_multiplier * narrow), + "512": int(32 * channel_multiplier * narrow), + "1024": int(16 * channel_multiplier * narrow), + } + self.channels = channels + + self.constant_input = ConstantInput(channels["4"], size=4) + self.style_conv1 = StyleConv( + channels["4"], + channels["4"], + kernel_size=3, + num_style_feat=num_style_feat, + demodulate=True, + sample_mode=None, + resample_kernel=resample_kernel, + ) + self.to_rgb1 = ToRGB( + channels["4"], + num_style_feat, + upsample=False, + resample_kernel=resample_kernel, + ) + + self.log_size = int(math.log(out_size, 2)) + self.num_layers = (self.log_size - 2) * 2 + 1 + self.num_latent = self.log_size * 2 - 2 + + self.style_convs = nn.ModuleList() + self.to_rgbs = nn.ModuleList() + self.noises = nn.Module() + + in_channels = channels["4"] + # noise + for layer_idx in range(self.num_layers): + resolution = 2 ** ((layer_idx + 5) // 2) + shape = [1, 1, resolution, resolution] + self.noises.register_buffer(f"noise{layer_idx}", torch.randn(*shape)) + # style convs and to_rgbs + for i in range(3, self.log_size + 1): + out_channels = channels[f"{2**i}"] + self.style_convs.append( + StyleConv( + in_channels, + out_channels, + kernel_size=3, + num_style_feat=num_style_feat, + demodulate=True, + sample_mode="upsample", + resample_kernel=resample_kernel, + ) + ) + self.style_convs.append( + StyleConv( + out_channels, + out_channels, + kernel_size=3, + num_style_feat=num_style_feat, + demodulate=True, + sample_mode=None, + resample_kernel=resample_kernel, + ) + ) + self.to_rgbs.append( + ToRGB( + out_channels, + num_style_feat, + upsample=True, + resample_kernel=resample_kernel, + ) + ) + in_channels = out_channels + + def make_noise(self): + """Make noise for noise injection.""" + device = self.constant_input.weight.device + noises = [torch.randn(1, 1, 4, 4, device=device)] + + for i in range(3, self.log_size + 1): + for _ in range(2): + noises.append(torch.randn(1, 1, 2**i, 2**i, device=device)) + + return noises + + def get_latent(self, x): + return self.style_mlp(x) + + def mean_latent(self, num_latent): + latent_in = torch.randn( + num_latent, self.num_style_feat, device=self.constant_input.weight.device + ) + latent = self.style_mlp(latent_in).mean(0, keepdim=True) + return latent + + def forward( + self, + styles, + input_is_latent=False, + noise=None, + randomize_noise=True, + truncation=1, + truncation_latent=None, + inject_index=None, + return_latents=False, + ): + """Forward function for StyleGAN2Generator. + + Args: + styles (list[Tensor]): Sample codes of styles. + input_is_latent (bool): Whether input is latent style. + Default: False. + noise (Tensor | None): Input noise or None. Default: None. 
+ randomize_noise (bool): Randomize noise, used when 'noise' is + None. Default: True. + truncation (float): The truncation ratio. Default: 1. + truncation_latent (Tensor | None): The truncation latent tensor. Default: None. + inject_index (int | None): The injection index for mixing noise. + Default: None. + return_latents (bool): Whether to return style latents. + Default: False. + """ + # style codes -> latents with Style MLP layer + if not input_is_latent: + styles = [self.style_mlp(s) for s in styles] + # noises + if noise is None: + if randomize_noise: + noise = [None] * self.num_layers # for each style conv layer + else: # use the stored noise + noise = [ + getattr(self.noises, f"noise{i}") for i in range(self.num_layers) + ] + # style truncation + if truncation < 1: + style_truncation = [] + for style in styles: + style_truncation.append( + truncation_latent + truncation * (style - truncation_latent) + ) + styles = style_truncation + # get style latent with injection + if len(styles) == 1: + inject_index = self.num_latent + + if styles[0].ndim < 3: + # repeat latent code for all the layers + latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + else: # used for encoder with different latent code for each layer + latent = styles[0] + elif len(styles) == 2: # mixing noises + if inject_index is None: + inject_index = random.randint(1, self.num_latent - 1) + latent1 = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + latent2 = ( + styles[1].unsqueeze(1).repeat(1, self.num_latent - inject_index, 1) + ) + latent = torch.cat([latent1, latent2], 1) + + # main generation + out = self.constant_input(latent.shape[0]) + out = self.style_conv1(out, latent[:, 0], noise=noise[0]) + skip = self.to_rgb1(out, latent[:, 1]) + + i = 1 + for conv1, conv2, noise1, noise2, to_rgb in zip( + self.style_convs[::2], + self.style_convs[1::2], + noise[1::2], + noise[2::2], + self.to_rgbs, + ): + out = conv1(out, latent[:, i], noise=noise1) + out = conv2(out, latent[:, i + 1], noise=noise2) + skip = to_rgb(out, latent[:, i + 2], skip) + i += 2 + + image = skip + + if return_latents: + return image, latent + else: + return image, None + + +class ScaledLeakyReLU(nn.Module): + """Scaled LeakyReLU. + + Args: + negative_slope (float): Negative slope. Default: 0.2. + """ + + def __init__(self, negative_slope=0.2): + super(ScaledLeakyReLU, self).__init__() + self.negative_slope = negative_slope + + def forward(self, x): + out = F.leaky_relu(x, negative_slope=self.negative_slope) + return out * math.sqrt(2) + + +class EqualConv2d(nn.Module): + """Equalized Conv2d as StyleGAN2. + + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + kernel_size (int): Size of the convolving kernel. + stride (int): Stride of the convolution. Default: 1. + padding (int): Zero-padding added to both sides of the input. + Default: 0. + bias (bool): If ``True``, adds a learnable bias to the output. + Default: ``True``. + bias_init_val (float): Bias initialized value. Default: 0.
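The truncation branch above interpolates each latent toward a reference latent, normally the mean of many mapped codes, trading diversity for fidelity; truncation=1 disables it. A caller-side sketch, assuming the fused CUDA ops imported at the top of this file are available:

    import torch

    gen = StyleGAN2Generator(out_size=512)   # as defined above
    z = torch.randn(4, 512)                  # one style code per image
    mean_w = gen.mean_latent(4096)           # reference latent for truncation
    img, _ = gen([z], truncation=0.7, truncation_latent=mean_w)
    print(img.shape)                         # torch.Size([4, 3, 512, 512])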
+ """ + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + padding=0, + bias=True, + bias_init_val=0, + ): + super(EqualConv2d, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = kernel_size + self.stride = stride + self.padding = padding + self.scale = 1 / math.sqrt(in_channels * kernel_size**2) + + self.weight = nn.Parameter( + torch.randn(out_channels, in_channels, kernel_size, kernel_size) + ) + if bias: + self.bias = nn.Parameter(torch.zeros(out_channels).fill_(bias_init_val)) + else: + self.register_parameter("bias", None) + + def forward(self, x): + out = F.conv2d( + x, + self.weight * self.scale, + bias=self.bias, + stride=self.stride, + padding=self.padding, + ) + + return out + + def __repr__(self): + return ( + f"{self.__class__.__name__}(in_channels={self.in_channels}, " + f"out_channels={self.out_channels}, " + f"kernel_size={self.kernel_size}," + f" stride={self.stride}, padding={self.padding}, " + f"bias={self.bias is not None})" + ) + + +class ConvLayer(nn.Sequential): + """Conv Layer used in StyleGAN2 Discriminator. + + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + kernel_size (int): Kernel size. + downsample (bool): Whether downsample by a factor of 2. + Default: False. + resample_kernel (list[int]): A list indicating the 1D resample + kernel magnitude. A cross production will be applied to + extent 1D resample kernel to 2D resample kernel. + Default: (1, 3, 3, 1). + bias (bool): Whether with bias. Default: True. + activate (bool): Whether use activateion. Default: True. + """ + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + downsample=False, + resample_kernel=(1, 3, 3, 1), + bias=True, + activate=True, + ): + layers = [] + # downsample + if downsample: + layers.append( + UpFirDnSmooth( + resample_kernel, + upsample_factor=1, + downsample_factor=2, + kernel_size=kernel_size, + ) + ) + stride = 2 + self.padding = 0 + else: + stride = 1 + self.padding = kernel_size // 2 + # conv + layers.append( + EqualConv2d( + in_channels, + out_channels, + kernel_size, + stride=stride, + padding=self.padding, + bias=bias and not activate, + ) + ) + # activation + if activate: + if bias: + layers.append(FusedLeakyReLU(out_channels)) + else: + layers.append(ScaledLeakyReLU(0.2)) + + super(ConvLayer, self).__init__(*layers) + + +class ResBlock(nn.Module): + """Residual block used in StyleGAN2 Discriminator. + + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + resample_kernel (list[int]): A list indicating the 1D resample + kernel magnitude. A cross production will be applied to + extent 1D resample kernel to 2D resample kernel. + Default: (1, 3, 3, 1). 
+ """ + + def __init__(self, in_channels, out_channels, resample_kernel=(1, 3, 3, 1)): + super(ResBlock, self).__init__() + + self.conv1 = ConvLayer(in_channels, in_channels, 3, bias=True, activate=True) + self.conv2 = ConvLayer( + in_channels, + out_channels, + 3, + downsample=True, + resample_kernel=resample_kernel, + bias=True, + activate=True, + ) + self.skip = ConvLayer( + in_channels, + out_channels, + 1, + downsample=True, + resample_kernel=resample_kernel, + bias=False, + activate=False, + ) + + def forward(self, x): + out = self.conv1(x) + out = self.conv2(out) + skip = self.skip(x) + out = (out + skip) / math.sqrt(2) + return out diff --git a/ldm_patched/pfn/architecture/face/stylegan2_bilinear_arch.py b/ldm_patched/pfn/architecture/face/stylegan2_bilinear_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..601f8cc4b33bdbb371d710a2bb0656e8ce102e26 --- /dev/null +++ b/ldm_patched/pfn/architecture/face/stylegan2_bilinear_arch.py @@ -0,0 +1,709 @@ +# pylint: skip-file +# type: ignore +import math +import random + +import torch +from torch import nn +from torch.nn import functional as F + +from .fused_act import FusedLeakyReLU, fused_leaky_relu + + +class NormStyleCode(nn.Module): + def forward(self, x): + """Normalize the style codes. + Args: + x (Tensor): Style codes with shape (b, c). + Returns: + Tensor: Normalized tensor. + """ + return x * torch.rsqrt(torch.mean(x**2, dim=1, keepdim=True) + 1e-8) + + +class EqualLinear(nn.Module): + """Equalized Linear as StyleGAN2. + Args: + in_channels (int): Size of each sample. + out_channels (int): Size of each output sample. + bias (bool): If set to ``False``, the layer will not learn an additive + bias. Default: ``True``. + bias_init_val (float): Bias initialized value. Default: 0. + lr_mul (float): Learning rate multiplier. Default: 1. + activation (None | str): The activation after ``linear`` operation. + Supported: 'fused_lrelu', None. Default: None. + """ + + def __init__( + self, + in_channels, + out_channels, + bias=True, + bias_init_val=0, + lr_mul=1, + activation=None, + ): + super(EqualLinear, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.lr_mul = lr_mul + self.activation = activation + if self.activation not in ["fused_lrelu", None]: + raise ValueError( + f"Wrong activation value in EqualLinear: {activation}" + "Supported ones are: ['fused_lrelu', None]." + ) + self.scale = (1 / math.sqrt(in_channels)) * lr_mul + + self.weight = nn.Parameter(torch.randn(out_channels, in_channels).div_(lr_mul)) + if bias: + self.bias = nn.Parameter(torch.zeros(out_channels).fill_(bias_init_val)) + else: + self.register_parameter("bias", None) + + def forward(self, x): + if self.bias is None: + bias = None + else: + bias = self.bias * self.lr_mul + if self.activation == "fused_lrelu": + out = F.linear(x, self.weight * self.scale) + out = fused_leaky_relu(out, bias) + else: + out = F.linear(x, self.weight * self.scale, bias=bias) + return out + + def __repr__(self): + return ( + f"{self.__class__.__name__}(in_channels={self.in_channels}, " + f"out_channels={self.out_channels}, bias={self.bias is not None})" + ) + + +class ModulatedConv2d(nn.Module): + """Modulated Conv2d used in StyleGAN2. + There is no bias in ModulatedConv2d. + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + kernel_size (int): Size of the convolving kernel. + num_style_feat (int): Channel number of style features. 
+ demodulate (bool): Whether to demodulate in the conv layer. + Default: True. + sample_mode (str | None): Indicating 'upsample', 'downsample' or None. + Default: None. + eps (float): A value added to the denominator for numerical stability. + Default: 1e-8. + """ + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + num_style_feat, + demodulate=True, + sample_mode=None, + eps=1e-8, + interpolation_mode="bilinear", + ): + super(ModulatedConv2d, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = kernel_size + self.demodulate = demodulate + self.sample_mode = sample_mode + self.eps = eps + self.interpolation_mode = interpolation_mode + if self.interpolation_mode == "nearest": + self.align_corners = None + else: + self.align_corners = False + + self.scale = 1 / math.sqrt(in_channels * kernel_size**2) + # modulation inside each modulated conv + self.modulation = EqualLinear( + num_style_feat, + in_channels, + bias=True, + bias_init_val=1, + lr_mul=1, + activation=None, + ) + + self.weight = nn.Parameter( + torch.randn(1, out_channels, in_channels, kernel_size, kernel_size) + ) + self.padding = kernel_size // 2 + + def forward(self, x, style): + """Forward function. + Args: + x (Tensor): Tensor with shape (b, c, h, w). + style (Tensor): Tensor with shape (b, num_style_feat). + Returns: + Tensor: Modulated tensor after convolution. + """ + b, c, h, w = x.shape # c = c_in + # weight modulation + style = self.modulation(style).view(b, 1, c, 1, 1) + # self.weight: (1, c_out, c_in, k, k); style: (b, 1, c, 1, 1) + weight = self.scale * self.weight * style # (b, c_out, c_in, k, k) + + if self.demodulate: + demod = torch.rsqrt(weight.pow(2).sum([2, 3, 4]) + self.eps) + weight = weight * demod.view(b, self.out_channels, 1, 1, 1) + + weight = weight.view( + b * self.out_channels, c, self.kernel_size, self.kernel_size + ) + + if self.sample_mode == "upsample": + x = F.interpolate( + x, + scale_factor=2, + mode=self.interpolation_mode, + align_corners=self.align_corners, + ) + elif self.sample_mode == "downsample": + x = F.interpolate( + x, + scale_factor=0.5, + mode=self.interpolation_mode, + align_corners=self.align_corners, + ) + + b, c, h, w = x.shape + x = x.view(1, b * c, h, w) + # weight: (b*c_out, c_in, k, k), groups=b + out = F.conv2d(x, weight, padding=self.padding, groups=b) + out = out.view(b, self.out_channels, *out.shape[2:4]) + + return out + + def __repr__(self): + return ( + f"{self.__class__.__name__}(in_channels={self.in_channels}, " + f"out_channels={self.out_channels}, " + f"kernel_size={self.kernel_size}, " + f"demodulate={self.demodulate}, sample_mode={self.sample_mode})" + ) + + +class StyleConv(nn.Module): + """Style conv. + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + kernel_size (int): Size of the convolving kernel. + num_style_feat (int): Channel number of style features. + demodulate (bool): Whether demodulate in the conv layer. Default: True. + sample_mode (str | None): Indicating 'upsample', 'downsample' or None. + Default: None. 
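Compared with the UpFirDn version earlier in the patch, this bilinear ModulatedConv2d replaces the FIR-filtered transposed convolution with a plain F.interpolate before the grouped conv, which drops the compiled upfirdn2d dependency. The resampling branches reduce to:

    import torch.nn.functional as F

    def resample(x, sample_mode, mode="bilinear"):
        # mirrors the up/down branches of the bilinear ModulatedConv2d.forward
        if sample_mode == "upsample":
            return F.interpolate(x, scale_factor=2, mode=mode, align_corners=False)
        if sample_mode == "downsample":
            return F.interpolate(x, scale_factor=0.5, mode=mode, align_corners=False)
        return x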
+ """ + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + num_style_feat, + demodulate=True, + sample_mode=None, + interpolation_mode="bilinear", + ): + super(StyleConv, self).__init__() + self.modulated_conv = ModulatedConv2d( + in_channels, + out_channels, + kernel_size, + num_style_feat, + demodulate=demodulate, + sample_mode=sample_mode, + interpolation_mode=interpolation_mode, + ) + self.weight = nn.Parameter(torch.zeros(1)) # for noise injection + self.activate = FusedLeakyReLU(out_channels) + + def forward(self, x, style, noise=None): + # modulate + out = self.modulated_conv(x, style) + # noise injection + if noise is None: + b, _, h, w = out.shape + noise = out.new_empty(b, 1, h, w).normal_() + out = out + self.weight * noise + # activation (with bias) + out = self.activate(out) + return out + + +class ToRGB(nn.Module): + """To RGB from features. + Args: + in_channels (int): Channel number of input. + num_style_feat (int): Channel number of style features. + upsample (bool): Whether to upsample. Default: True. + """ + + def __init__( + self, in_channels, num_style_feat, upsample=True, interpolation_mode="bilinear" + ): + super(ToRGB, self).__init__() + self.upsample = upsample + self.interpolation_mode = interpolation_mode + if self.interpolation_mode == "nearest": + self.align_corners = None + else: + self.align_corners = False + self.modulated_conv = ModulatedConv2d( + in_channels, + 3, + kernel_size=1, + num_style_feat=num_style_feat, + demodulate=False, + sample_mode=None, + interpolation_mode=interpolation_mode, + ) + self.bias = nn.Parameter(torch.zeros(1, 3, 1, 1)) + + def forward(self, x, style, skip=None): + """Forward function. + Args: + x (Tensor): Feature tensor with shape (b, c, h, w). + style (Tensor): Tensor with shape (b, num_style_feat). + skip (Tensor): Base/skip tensor. Default: None. + Returns: + Tensor: RGB images. + """ + out = self.modulated_conv(x, style) + out = out + self.bias + if skip is not None: + if self.upsample: + skip = F.interpolate( + skip, + scale_factor=2, + mode=self.interpolation_mode, + align_corners=self.align_corners, + ) + out = out + skip + return out + + +class ConstantInput(nn.Module): + """Constant input. + Args: + num_channel (int): Channel number of constant input. + size (int): Spatial size of constant input. + """ + + def __init__(self, num_channel, size): + super(ConstantInput, self).__init__() + self.weight = nn.Parameter(torch.randn(1, num_channel, size, size)) + + def forward(self, batch): + out = self.weight.repeat(batch, 1, 1, 1) + return out + + +class StyleGAN2GeneratorBilinear(nn.Module): + """StyleGAN2 Generator. + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + num_mlp (int): Layer number of MLP style layers. Default: 8. + channel_multiplier (int): Channel multiplier for large networks of + StyleGAN2. Default: 2. + lr_mlp (float): Learning rate multiplier for mlp layers. Default: 0.01. + narrow (float): Narrow ratio for channels. Default: 1.0. 
+ """ + + def __init__( + self, + out_size, + num_style_feat=512, + num_mlp=8, + channel_multiplier=2, + lr_mlp=0.01, + narrow=1, + interpolation_mode="bilinear", + ): + super(StyleGAN2GeneratorBilinear, self).__init__() + # Style MLP layers + self.num_style_feat = num_style_feat + style_mlp_layers = [NormStyleCode()] + for i in range(num_mlp): + style_mlp_layers.append( + EqualLinear( + num_style_feat, + num_style_feat, + bias=True, + bias_init_val=0, + lr_mul=lr_mlp, + activation="fused_lrelu", + ) + ) + self.style_mlp = nn.Sequential(*style_mlp_layers) + + channels = { + "4": int(512 * narrow), + "8": int(512 * narrow), + "16": int(512 * narrow), + "32": int(512 * narrow), + "64": int(256 * channel_multiplier * narrow), + "128": int(128 * channel_multiplier * narrow), + "256": int(64 * channel_multiplier * narrow), + "512": int(32 * channel_multiplier * narrow), + "1024": int(16 * channel_multiplier * narrow), + } + self.channels = channels + + self.constant_input = ConstantInput(channels["4"], size=4) + self.style_conv1 = StyleConv( + channels["4"], + channels["4"], + kernel_size=3, + num_style_feat=num_style_feat, + demodulate=True, + sample_mode=None, + interpolation_mode=interpolation_mode, + ) + self.to_rgb1 = ToRGB( + channels["4"], + num_style_feat, + upsample=False, + interpolation_mode=interpolation_mode, + ) + + self.log_size = int(math.log(out_size, 2)) + self.num_layers = (self.log_size - 2) * 2 + 1 + self.num_latent = self.log_size * 2 - 2 + + self.style_convs = nn.ModuleList() + self.to_rgbs = nn.ModuleList() + self.noises = nn.Module() + + in_channels = channels["4"] + # noise + for layer_idx in range(self.num_layers): + resolution = 2 ** ((layer_idx + 5) // 2) + shape = [1, 1, resolution, resolution] + self.noises.register_buffer(f"noise{layer_idx}", torch.randn(*shape)) + # style convs and to_rgbs + for i in range(3, self.log_size + 1): + out_channels = channels[f"{2**i}"] + self.style_convs.append( + StyleConv( + in_channels, + out_channels, + kernel_size=3, + num_style_feat=num_style_feat, + demodulate=True, + sample_mode="upsample", + interpolation_mode=interpolation_mode, + ) + ) + self.style_convs.append( + StyleConv( + out_channels, + out_channels, + kernel_size=3, + num_style_feat=num_style_feat, + demodulate=True, + sample_mode=None, + interpolation_mode=interpolation_mode, + ) + ) + self.to_rgbs.append( + ToRGB( + out_channels, + num_style_feat, + upsample=True, + interpolation_mode=interpolation_mode, + ) + ) + in_channels = out_channels + + def make_noise(self): + """Make noise for noise injection.""" + device = self.constant_input.weight.device + noises = [torch.randn(1, 1, 4, 4, device=device)] + + for i in range(3, self.log_size + 1): + for _ in range(2): + noises.append(torch.randn(1, 1, 2**i, 2**i, device=device)) + + return noises + + def get_latent(self, x): + return self.style_mlp(x) + + def mean_latent(self, num_latent): + latent_in = torch.randn( + num_latent, self.num_style_feat, device=self.constant_input.weight.device + ) + latent = self.style_mlp(latent_in).mean(0, keepdim=True) + return latent + + def forward( + self, + styles, + input_is_latent=False, + noise=None, + randomize_noise=True, + truncation=1, + truncation_latent=None, + inject_index=None, + return_latents=False, + ): + """Forward function for StyleGAN2Generator. + Args: + styles (list[Tensor]): Sample codes of styles. + input_is_latent (bool): Whether input is latent style. + Default: False. + noise (Tensor | None): Input noise or None. Default: None. 
+ randomize_noise (bool): Randomize noise, used when 'noise' is + None. Default: True. + truncation (float): The truncation ratio. Default: 1. + truncation_latent (Tensor | None): The truncation latent tensor. Default: None. + inject_index (int | None): The injection index for mixing noise. + Default: None. + return_latents (bool): Whether to return style latents. + Default: False. + """ + # style codes -> latents with Style MLP layer + if not input_is_latent: + styles = [self.style_mlp(s) for s in styles] + # noises + if noise is None: + if randomize_noise: + noise = [None] * self.num_layers # for each style conv layer + else: # use the stored noise + noise = [ + getattr(self.noises, f"noise{i}") for i in range(self.num_layers) + ] + # style truncation + if truncation < 1: + style_truncation = [] + for style in styles: + style_truncation.append( + truncation_latent + truncation * (style - truncation_latent) + ) + styles = style_truncation + # get style latent with injection + if len(styles) == 1: + inject_index = self.num_latent + + if styles[0].ndim < 3: + # repeat latent code for all the layers + latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + else: # used for encoder with different latent code for each layer + latent = styles[0] + elif len(styles) == 2: # mixing noises + if inject_index is None: + inject_index = random.randint(1, self.num_latent - 1) + latent1 = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + latent2 = ( + styles[1].unsqueeze(1).repeat(1, self.num_latent - inject_index, 1) + ) + latent = torch.cat([latent1, latent2], 1) + + # main generation + out = self.constant_input(latent.shape[0]) + out = self.style_conv1(out, latent[:, 0], noise=noise[0]) + skip = self.to_rgb1(out, latent[:, 1]) + + i = 1 + for conv1, conv2, noise1, noise2, to_rgb in zip( + self.style_convs[::2], + self.style_convs[1::2], + noise[1::2], + noise[2::2], + self.to_rgbs, + ): + out = conv1(out, latent[:, i], noise=noise1) + out = conv2(out, latent[:, i + 1], noise=noise2) + skip = to_rgb(out, latent[:, i + 2], skip) + i += 2 + + image = skip + + if return_latents: + return image, latent + else: + return image, None + + +class ScaledLeakyReLU(nn.Module): + """Scaled LeakyReLU. + Args: + negative_slope (float): Negative slope. Default: 0.2. + """ + + def __init__(self, negative_slope=0.2): + super(ScaledLeakyReLU, self).__init__() + self.negative_slope = negative_slope + + def forward(self, x): + out = F.leaky_relu(x, negative_slope=self.negative_slope) + return out * math.sqrt(2) + + +class EqualConv2d(nn.Module): + """Equalized Conv2d as StyleGAN2. + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + kernel_size (int): Size of the convolving kernel. + stride (int): Stride of the convolution. Default: 1. + padding (int): Zero-padding added to both sides of the input. + Default: 0. + bias (bool): If ``True``, adds a learnable bias to the output. + Default: ``True``. + bias_init_val (float): Bias initialized value. Default: 0.
+ """ + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + padding=0, + bias=True, + bias_init_val=0, + ): + super(EqualConv2d, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = kernel_size + self.stride = stride + self.padding = padding + self.scale = 1 / math.sqrt(in_channels * kernel_size**2) + + self.weight = nn.Parameter( + torch.randn(out_channels, in_channels, kernel_size, kernel_size) + ) + if bias: + self.bias = nn.Parameter(torch.zeros(out_channels).fill_(bias_init_val)) + else: + self.register_parameter("bias", None) + + def forward(self, x): + out = F.conv2d( + x, + self.weight * self.scale, + bias=self.bias, + stride=self.stride, + padding=self.padding, + ) + + return out + + def __repr__(self): + return ( + f"{self.__class__.__name__}(in_channels={self.in_channels}, " + f"out_channels={self.out_channels}, " + f"kernel_size={self.kernel_size}," + f" stride={self.stride}, padding={self.padding}, " + f"bias={self.bias is not None})" + ) + + +class ConvLayer(nn.Sequential): + """Conv Layer used in StyleGAN2 Discriminator. + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + kernel_size (int): Kernel size. + downsample (bool): Whether downsample by a factor of 2. + Default: False. + bias (bool): Whether with bias. Default: True. + activate (bool): Whether use activateion. Default: True. + """ + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + downsample=False, + bias=True, + activate=True, + interpolation_mode="bilinear", + ): + layers = [] + self.interpolation_mode = interpolation_mode + # downsample + if downsample: + if self.interpolation_mode == "nearest": + self.align_corners = None + else: + self.align_corners = False + + layers.append( + torch.nn.Upsample( + scale_factor=0.5, + mode=interpolation_mode, + align_corners=self.align_corners, + ) + ) + stride = 1 + self.padding = kernel_size // 2 + # conv + layers.append( + EqualConv2d( + in_channels, + out_channels, + kernel_size, + stride=stride, + padding=self.padding, + bias=bias and not activate, + ) + ) + # activation + if activate: + if bias: + layers.append(FusedLeakyReLU(out_channels)) + else: + layers.append(ScaledLeakyReLU(0.2)) + + super(ConvLayer, self).__init__(*layers) + + +class ResBlock(nn.Module): + """Residual block used in StyleGAN2 Discriminator. + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. 
+ """ + + def __init__(self, in_channels, out_channels, interpolation_mode="bilinear"): + super(ResBlock, self).__init__() + + self.conv1 = ConvLayer(in_channels, in_channels, 3, bias=True, activate=True) + self.conv2 = ConvLayer( + in_channels, + out_channels, + 3, + downsample=True, + interpolation_mode=interpolation_mode, + bias=True, + activate=True, + ) + self.skip = ConvLayer( + in_channels, + out_channels, + 1, + downsample=True, + interpolation_mode=interpolation_mode, + bias=False, + activate=False, + ) + + def forward(self, x): + out = self.conv1(x) + out = self.conv2(out) + skip = self.skip(x) + out = (out + skip) / math.sqrt(2) + return out diff --git a/ldm_patched/pfn/architecture/face/stylegan2_clean_arch.py b/ldm_patched/pfn/architecture/face/stylegan2_clean_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..c48de9af6904b8d1891a84efa8e4d76104d5d710 --- /dev/null +++ b/ldm_patched/pfn/architecture/face/stylegan2_clean_arch.py @@ -0,0 +1,453 @@ +# pylint: skip-file +# type: ignore +import math + +import torch +from torch import nn +from torch.nn import functional as F +from torch.nn import init +from torch.nn.modules.batchnorm import _BatchNorm + + +@torch.no_grad() +def default_init_weights(module_list, scale=1, bias_fill=0, **kwargs): + """Initialize network weights. + Args: + module_list (list[nn.Module] | nn.Module): Modules to be initialized. + scale (float): Scale initialized weights, especially for residual + blocks. Default: 1. + bias_fill (float): The value to fill bias. Default: 0 + kwargs (dict): Other arguments for initialization function. + """ + if not isinstance(module_list, list): + module_list = [module_list] + for module in module_list: + for m in module.modules(): + if isinstance(m, nn.Conv2d): + init.kaiming_normal_(m.weight, **kwargs) + m.weight.data *= scale + if m.bias is not None: + m.bias.data.fill_(bias_fill) + elif isinstance(m, nn.Linear): + init.kaiming_normal_(m.weight, **kwargs) + m.weight.data *= scale + if m.bias is not None: + m.bias.data.fill_(bias_fill) + elif isinstance(m, _BatchNorm): + init.constant_(m.weight, 1) + if m.bias is not None: + m.bias.data.fill_(bias_fill) + + +class NormStyleCode(nn.Module): + def forward(self, x): + """Normalize the style codes. + Args: + x (Tensor): Style codes with shape (b, c). + Returns: + Tensor: Normalized tensor. + """ + return x * torch.rsqrt(torch.mean(x**2, dim=1, keepdim=True) + 1e-8) + + +class ModulatedConv2d(nn.Module): + """Modulated Conv2d used in StyleGAN2. + There is no bias in ModulatedConv2d. + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + kernel_size (int): Size of the convolving kernel. + num_style_feat (int): Channel number of style features. + demodulate (bool): Whether to demodulate in the conv layer. Default: True. + sample_mode (str | None): Indicating 'upsample', 'downsample' or None. Default: None. + eps (float): A value added to the denominator for numerical stability. Default: 1e-8. 
+ """ + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + num_style_feat, + demodulate=True, + sample_mode=None, + eps=1e-8, + ): + super(ModulatedConv2d, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = kernel_size + self.demodulate = demodulate + self.sample_mode = sample_mode + self.eps = eps + + # modulation inside each modulated conv + self.modulation = nn.Linear(num_style_feat, in_channels, bias=True) + # initialization + default_init_weights( + self.modulation, + scale=1, + bias_fill=1, + a=0, + mode="fan_in", + nonlinearity="linear", + ) + + self.weight = nn.Parameter( + torch.randn(1, out_channels, in_channels, kernel_size, kernel_size) + / math.sqrt(in_channels * kernel_size**2) + ) + self.padding = kernel_size // 2 + + def forward(self, x, style): + """Forward function. + Args: + x (Tensor): Tensor with shape (b, c, h, w). + style (Tensor): Tensor with shape (b, num_style_feat). + Returns: + Tensor: Modulated tensor after convolution. + """ + b, c, h, w = x.shape # c = c_in + # weight modulation + style = self.modulation(style).view(b, 1, c, 1, 1) + # self.weight: (1, c_out, c_in, k, k); style: (b, 1, c, 1, 1) + weight = self.weight * style # (b, c_out, c_in, k, k) + + if self.demodulate: + demod = torch.rsqrt(weight.pow(2).sum([2, 3, 4]) + self.eps) + weight = weight * demod.view(b, self.out_channels, 1, 1, 1) + + weight = weight.view( + b * self.out_channels, c, self.kernel_size, self.kernel_size + ) + + # upsample or downsample if necessary + if self.sample_mode == "upsample": + x = F.interpolate(x, scale_factor=2, mode="bilinear", align_corners=False) + elif self.sample_mode == "downsample": + x = F.interpolate(x, scale_factor=0.5, mode="bilinear", align_corners=False) + + b, c, h, w = x.shape + x = x.view(1, b * c, h, w) + # weight: (b*c_out, c_in, k, k), groups=b + out = F.conv2d(x, weight, padding=self.padding, groups=b) + out = out.view(b, self.out_channels, *out.shape[2:4]) + + return out + + def __repr__(self): + return ( + f"{self.__class__.__name__}(in_channels={self.in_channels}, out_channels={self.out_channels}, " + f"kernel_size={self.kernel_size}, demodulate={self.demodulate}, sample_mode={self.sample_mode})" + ) + + +class StyleConv(nn.Module): + """Style conv used in StyleGAN2. + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + kernel_size (int): Size of the convolving kernel. + num_style_feat (int): Channel number of style features. + demodulate (bool): Whether demodulate in the conv layer. Default: True. + sample_mode (str | None): Indicating 'upsample', 'downsample' or None. Default: None. 
+ """ + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + num_style_feat, + demodulate=True, + sample_mode=None, + ): + super(StyleConv, self).__init__() + self.modulated_conv = ModulatedConv2d( + in_channels, + out_channels, + kernel_size, + num_style_feat, + demodulate=demodulate, + sample_mode=sample_mode, + ) + self.weight = nn.Parameter(torch.zeros(1)) # for noise injection + self.bias = nn.Parameter(torch.zeros(1, out_channels, 1, 1)) + self.activate = nn.LeakyReLU(negative_slope=0.2, inplace=True) + + def forward(self, x, style, noise=None): + # modulate + out = self.modulated_conv(x, style) * 2**0.5 # for conversion + # noise injection + if noise is None: + b, _, h, w = out.shape + noise = out.new_empty(b, 1, h, w).normal_() + out = out + self.weight * noise + # add bias + out = out + self.bias + # activation + out = self.activate(out) + return out + + +class ToRGB(nn.Module): + """To RGB (image space) from features. + Args: + in_channels (int): Channel number of input. + num_style_feat (int): Channel number of style features. + upsample (bool): Whether to upsample. Default: True. + """ + + def __init__(self, in_channels, num_style_feat, upsample=True): + super(ToRGB, self).__init__() + self.upsample = upsample + self.modulated_conv = ModulatedConv2d( + in_channels, + 3, + kernel_size=1, + num_style_feat=num_style_feat, + demodulate=False, + sample_mode=None, + ) + self.bias = nn.Parameter(torch.zeros(1, 3, 1, 1)) + + def forward(self, x, style, skip=None): + """Forward function. + Args: + x (Tensor): Feature tensor with shape (b, c, h, w). + style (Tensor): Tensor with shape (b, num_style_feat). + skip (Tensor): Base/skip tensor. Default: None. + Returns: + Tensor: RGB images. + """ + out = self.modulated_conv(x, style) + out = out + self.bias + if skip is not None: + if self.upsample: + skip = F.interpolate( + skip, scale_factor=2, mode="bilinear", align_corners=False + ) + out = out + skip + return out + + +class ConstantInput(nn.Module): + """Constant input. + Args: + num_channel (int): Channel number of constant input. + size (int): Spatial size of constant input. + """ + + def __init__(self, num_channel, size): + super(ConstantInput, self).__init__() + self.weight = nn.Parameter(torch.randn(1, num_channel, size, size)) + + def forward(self, batch): + out = self.weight.repeat(batch, 1, 1, 1) + return out + + +class StyleGAN2GeneratorClean(nn.Module): + """Clean version of StyleGAN2 Generator. + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + num_mlp (int): Layer number of MLP style layers. Default: 8. + channel_multiplier (int): Channel multiplier for large networks of StyleGAN2. Default: 2. + narrow (float): Narrow ratio for channels. Default: 1.0. 
+ """ + + def __init__( + self, out_size, num_style_feat=512, num_mlp=8, channel_multiplier=2, narrow=1 + ): + super(StyleGAN2GeneratorClean, self).__init__() + # Style MLP layers + self.num_style_feat = num_style_feat + style_mlp_layers = [NormStyleCode()] + for i in range(num_mlp): + style_mlp_layers.extend( + [ + nn.Linear(num_style_feat, num_style_feat, bias=True), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + ] + ) + self.style_mlp = nn.Sequential(*style_mlp_layers) + # initialization + default_init_weights( + self.style_mlp, + scale=1, + bias_fill=0, + a=0.2, + mode="fan_in", + nonlinearity="leaky_relu", + ) + + # channel list + channels = { + "4": int(512 * narrow), + "8": int(512 * narrow), + "16": int(512 * narrow), + "32": int(512 * narrow), + "64": int(256 * channel_multiplier * narrow), + "128": int(128 * channel_multiplier * narrow), + "256": int(64 * channel_multiplier * narrow), + "512": int(32 * channel_multiplier * narrow), + "1024": int(16 * channel_multiplier * narrow), + } + self.channels = channels + + self.constant_input = ConstantInput(channels["4"], size=4) + self.style_conv1 = StyleConv( + channels["4"], + channels["4"], + kernel_size=3, + num_style_feat=num_style_feat, + demodulate=True, + sample_mode=None, + ) + self.to_rgb1 = ToRGB(channels["4"], num_style_feat, upsample=False) + + self.log_size = int(math.log(out_size, 2)) + self.num_layers = (self.log_size - 2) * 2 + 1 + self.num_latent = self.log_size * 2 - 2 + + self.style_convs = nn.ModuleList() + self.to_rgbs = nn.ModuleList() + self.noises = nn.Module() + + in_channels = channels["4"] + # noise + for layer_idx in range(self.num_layers): + resolution = 2 ** ((layer_idx + 5) // 2) + shape = [1, 1, resolution, resolution] + self.noises.register_buffer(f"noise{layer_idx}", torch.randn(*shape)) + # style convs and to_rgbs + for i in range(3, self.log_size + 1): + out_channels = channels[f"{2**i}"] + self.style_convs.append( + StyleConv( + in_channels, + out_channels, + kernel_size=3, + num_style_feat=num_style_feat, + demodulate=True, + sample_mode="upsample", + ) + ) + self.style_convs.append( + StyleConv( + out_channels, + out_channels, + kernel_size=3, + num_style_feat=num_style_feat, + demodulate=True, + sample_mode=None, + ) + ) + self.to_rgbs.append(ToRGB(out_channels, num_style_feat, upsample=True)) + in_channels = out_channels + + def make_noise(self): + """Make noise for noise injection.""" + device = self.constant_input.weight.device + noises = [torch.randn(1, 1, 4, 4, device=device)] + + for i in range(3, self.log_size + 1): + for _ in range(2): + noises.append(torch.randn(1, 1, 2**i, 2**i, device=device)) + + return noises + + def get_latent(self, x): + return self.style_mlp(x) + + def mean_latent(self, num_latent): + latent_in = torch.randn( + num_latent, self.num_style_feat, device=self.constant_input.weight.device + ) + latent = self.style_mlp(latent_in).mean(0, keepdim=True) + return latent + + def forward( + self, + styles, + input_is_latent=False, + noise=None, + randomize_noise=True, + truncation=1, + truncation_latent=None, + inject_index=None, + return_latents=False, + ): + """Forward function for StyleGAN2GeneratorClean. + Args: + styles (list[Tensor]): Sample codes of styles. + input_is_latent (bool): Whether input is latent style. Default: False. + noise (Tensor | None): Input noise or None. Default: None. + randomize_noise (bool): Randomize noise, used when 'noise' is False. Default: True. + truncation (float): The truncation ratio. Default: 1. 
+ truncation_latent (Tensor | None): The truncation latent tensor. Default: None. + inject_index (int | None): The injection index for mixing noise. Default: None. + return_latents (bool): Whether to return style latents. Default: False. + """ + # style codes -> latents with Style MLP layer + if not input_is_latent: + styles = [self.style_mlp(s) for s in styles] + # noises + if noise is None: + if randomize_noise: + noise = [None] * self.num_layers # for each style conv layer + else: # use the stored noise + noise = [ + getattr(self.noises, f"noise{i}") for i in range(self.num_layers) + ] + # style truncation + if truncation < 1: + style_truncation = [] + for style in styles: + style_truncation.append( + truncation_latent + truncation * (style - truncation_latent) + ) + styles = style_truncation + # get style latents with injection + if len(styles) == 1: + inject_index = self.num_latent + + if styles[0].ndim < 3: + # repeat latent code for all the layers + latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + else: # used for encoder with different latent code for each layer + latent = styles[0] + elif len(styles) == 2: # mixing noises + if inject_index is None: + inject_index = random.randint(1, self.num_latent - 1) + latent1 = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + latent2 = ( + styles[1].unsqueeze(1).repeat(1, self.num_latent - inject_index, 1) + ) + latent = torch.cat([latent1, latent2], 1) + + # main generation + out = self.constant_input(latent.shape[0]) + out = self.style_conv1(out, latent[:, 0], noise=noise[0]) + skip = self.to_rgb1(out, latent[:, 1]) + + i = 1 + for conv1, conv2, noise1, noise2, to_rgb in zip( + self.style_convs[::2], + self.style_convs[1::2], + noise[1::2], + noise[2::2], + self.to_rgbs, + ): + out = conv1(out, latent[:, i], noise=noise1) + out = conv2(out, latent[:, i + 1], noise=noise2) + skip = to_rgb(out, latent[:, i + 2], skip) # feature back to the rgb space + i += 2 + + image = skip + + if return_latents: + return image, latent + else: + return image, None diff --git a/ldm_patched/pfn/architecture/face/upfirdn2d.py b/ldm_patched/pfn/architecture/face/upfirdn2d.py new file mode 100644 index 0000000000000000000000000000000000000000..4ea4541513f27e3c9dddcee864cfeb87efddadb7 --- /dev/null +++ b/ldm_patched/pfn/architecture/face/upfirdn2d.py @@ -0,0 +1,194 @@ +# pylint: skip-file +# type: ignore +# modify from https://github.com/rosinality/stylegan2-pytorch/blob/master/op/upfirdn2d.py # noqa:E501 + +import os + +import torch +from torch.autograd import Function +from torch.nn import functional as F + +upfirdn2d_ext = None + + +class UpFirDn2dBackward(Function): + @staticmethod + def forward( + ctx, grad_output, kernel, grad_kernel, up, down, pad, g_pad, in_size, out_size + ): + up_x, up_y = up + down_x, down_y = down + g_pad_x0, g_pad_x1, g_pad_y0, g_pad_y1 = g_pad + + grad_output = grad_output.reshape(-1, out_size[0], out_size[1], 1) + + grad_input = upfirdn2d_ext.upfirdn2d( + grad_output, + grad_kernel, + down_x, + down_y, + up_x, + up_y, + g_pad_x0, + g_pad_x1, + g_pad_y0, + g_pad_y1, + ) + grad_input = grad_input.view(in_size[0], in_size[1], in_size[2], in_size[3]) + + ctx.save_for_backward(kernel) + + pad_x0, pad_x1, pad_y0, pad_y1 = pad + + ctx.up_x = up_x + ctx.up_y = up_y + ctx.down_x = down_x + ctx.down_y = down_y + ctx.pad_x0 = pad_x0 + ctx.pad_x1 = pad_x1 + ctx.pad_y0 = pad_y0 + ctx.pad_y1 = pad_y1 + ctx.in_size = in_size + ctx.out_size = out_size + + return grad_input + + @staticmethod + def backward(ctx, gradgrad_input): + 
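+        # Double-backward: run upfirdn2d on the incoming gradient-of-gradient with
+        # the original (un-flipped) kernel and the up/down/pad settings saved in ctx.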
(kernel,) = ctx.saved_tensors + + gradgrad_input = gradgrad_input.reshape(-1, ctx.in_size[2], ctx.in_size[3], 1) + + gradgrad_out = upfirdn2d_ext.upfirdn2d( + gradgrad_input, + kernel, + ctx.up_x, + ctx.up_y, + ctx.down_x, + ctx.down_y, + ctx.pad_x0, + ctx.pad_x1, + ctx.pad_y0, + ctx.pad_y1, + ) + # gradgrad_out = gradgrad_out.view(ctx.in_size[0], ctx.out_size[0], + # ctx.out_size[1], ctx.in_size[3]) + gradgrad_out = gradgrad_out.view( + ctx.in_size[0], ctx.in_size[1], ctx.out_size[0], ctx.out_size[1] + ) + + return gradgrad_out, None, None, None, None, None, None, None, None + + +class UpFirDn2d(Function): + @staticmethod + def forward(ctx, input, kernel, up, down, pad): + up_x, up_y = up + down_x, down_y = down + pad_x0, pad_x1, pad_y0, pad_y1 = pad + + kernel_h, kernel_w = kernel.shape + _, channel, in_h, in_w = input.shape + ctx.in_size = input.shape + + input = input.reshape(-1, in_h, in_w, 1) + + ctx.save_for_backward(kernel, torch.flip(kernel, [0, 1])) + + out_h = (in_h * up_y + pad_y0 + pad_y1 - kernel_h) // down_y + 1 + out_w = (in_w * up_x + pad_x0 + pad_x1 - kernel_w) // down_x + 1 + ctx.out_size = (out_h, out_w) + + ctx.up = (up_x, up_y) + ctx.down = (down_x, down_y) + ctx.pad = (pad_x0, pad_x1, pad_y0, pad_y1) + + g_pad_x0 = kernel_w - pad_x0 - 1 + g_pad_y0 = kernel_h - pad_y0 - 1 + g_pad_x1 = in_w * up_x - out_w * down_x + pad_x0 - up_x + 1 + g_pad_y1 = in_h * up_y - out_h * down_y + pad_y0 - up_y + 1 + + ctx.g_pad = (g_pad_x0, g_pad_x1, g_pad_y0, g_pad_y1) + + out = upfirdn2d_ext.upfirdn2d( + input, kernel, up_x, up_y, down_x, down_y, pad_x0, pad_x1, pad_y0, pad_y1 + ) + # out = out.view(major, out_h, out_w, minor) + out = out.view(-1, channel, out_h, out_w) + + return out + + @staticmethod + def backward(ctx, grad_output): + kernel, grad_kernel = ctx.saved_tensors + + grad_input = UpFirDn2dBackward.apply( + grad_output, + kernel, + grad_kernel, + ctx.up, + ctx.down, + ctx.pad, + ctx.g_pad, + ctx.in_size, + ctx.out_size, + ) + + return grad_input, None, None, None, None + + +def upfirdn2d(input, kernel, up=1, down=1, pad=(0, 0)): + if input.device.type == "cpu": + out = upfirdn2d_native( + input, kernel, up, up, down, down, pad[0], pad[1], pad[0], pad[1] + ) + else: + out = UpFirDn2d.apply( + input, kernel, (up, up), (down, down), (pad[0], pad[1], pad[0], pad[1]) + ) + + return out + + +def upfirdn2d_native( + input, kernel, up_x, up_y, down_x, down_y, pad_x0, pad_x1, pad_y0, pad_y1 +): + _, channel, in_h, in_w = input.shape + input = input.reshape(-1, in_h, in_w, 1) + + _, in_h, in_w, minor = input.shape + kernel_h, kernel_w = kernel.shape + + out = input.view(-1, in_h, 1, in_w, 1, minor) + out = F.pad(out, [0, 0, 0, up_x - 1, 0, 0, 0, up_y - 1]) + out = out.view(-1, in_h * up_y, in_w * up_x, minor) + + out = F.pad( + out, [0, 0, max(pad_x0, 0), max(pad_x1, 0), max(pad_y0, 0), max(pad_y1, 0)] + ) + out = out[ + :, + max(-pad_y0, 0) : out.shape[1] - max(-pad_y1, 0), + max(-pad_x0, 0) : out.shape[2] - max(-pad_x1, 0), + :, + ] + + out = out.permute(0, 3, 1, 2) + out = out.reshape( + [-1, 1, in_h * up_y + pad_y0 + pad_y1, in_w * up_x + pad_x0 + pad_x1] + ) + w = torch.flip(kernel, [0, 1]).view(1, 1, kernel_h, kernel_w) + out = F.conv2d(out, w) + out = out.reshape( + -1, + minor, + in_h * up_y + pad_y0 + pad_y1 - kernel_h + 1, + in_w * up_x + pad_x0 + pad_x1 - kernel_w + 1, + ) + out = out.permute(0, 2, 3, 1) + out = out[:, ::down_y, ::down_x, :] + + out_h = (in_h * up_y + pad_y0 + pad_y1 - kernel_h) // down_y + 1 + out_w = (in_w * up_x + pad_x0 + pad_x1 - kernel_w) // down_x + 
1 + + return out.view(-1, channel, out_h, out_w) diff --git a/ldm_patched/pfn/architecture/timm/LICENSE b/ldm_patched/pfn/architecture/timm/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..b4e9438bd1e07e17abf58cfd86e536ec880348a3 --- /dev/null +++ b/ldm_patched/pfn/architecture/timm/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Ross Wightman + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/ldm_patched/pfn/architecture/timm/__pycache__/drop.cpython-310.pyc b/ldm_patched/pfn/architecture/timm/__pycache__/drop.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1b991ad636f224bf0d16ef255d2ce0c02f310297 Binary files /dev/null and b/ldm_patched/pfn/architecture/timm/__pycache__/drop.cpython-310.pyc differ diff --git a/ldm_patched/pfn/architecture/timm/__pycache__/helpers.cpython-310.pyc b/ldm_patched/pfn/architecture/timm/__pycache__/helpers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7e9da7efaf6b51d0d5c069e1febc7f9f75b937c8 Binary files /dev/null and b/ldm_patched/pfn/architecture/timm/__pycache__/helpers.cpython-310.pyc differ diff --git a/ldm_patched/pfn/architecture/timm/__pycache__/weight_init.cpython-310.pyc b/ldm_patched/pfn/architecture/timm/__pycache__/weight_init.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d3afc71c0ed21f103855e6c2112a9031ceb11669 Binary files /dev/null and b/ldm_patched/pfn/architecture/timm/__pycache__/weight_init.cpython-310.pyc differ diff --git a/ldm_patched/pfn/architecture/timm/drop.py b/ldm_patched/pfn/architecture/timm/drop.py new file mode 100644 index 0000000000000000000000000000000000000000..14f0da914b2a198af7e6124cd90bad6adaf8a84e --- /dev/null +++ b/ldm_patched/pfn/architecture/timm/drop.py @@ -0,0 +1,223 @@ +""" DropBlock, DropPath + +PyTorch implementations of DropBlock and DropPath (Stochastic Depth) regularization layers. + +Papers: +DropBlock: A regularization method for convolutional networks (https://arxiv.org/abs/1810.12890) + +Deep Networks with Stochastic Depth (https://arxiv.org/abs/1603.09382) + +Code: +DropBlock impl inspired by two Tensorflow impl that I liked: + - https://github.com/tensorflow/tpu/blob/master/models/official/resnet/resnet_model.py#L74 + - https://github.com/clovaai/assembled-cnn/blob/master/nets/blocks.py + +Hacked together by / Copyright 2020 Ross Wightman +""" +import torch +import torch.nn as nn +import torch.nn.functional as F + + +def drop_block_2d( + x, + drop_prob: float = 0.1, + block_size: int = 7, + gamma_scale: float = 1.0, + with_noise: bool = False, + inplace: bool = False, + batchwise: bool = False, +): + """DropBlock. See https://arxiv.org/pdf/1810.12890.pdf + + DropBlock with an experimental gaussian noise option. This layer has been tested on a few training + runs with success, but needs further validation and possibly optimization for lower runtime impact. 
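+
+    Example (an illustrative sketch, not from the original source; shapes are arbitrary,
+    and in practice this would only run while the parent module is in training mode):
+        >>> x = torch.randn(4, 8, 28, 28)
+        >>> y = drop_block_2d(x, drop_prob=0.1, block_size=7)  # same shape as x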
+ """ + _, C, H, W = x.shape + total_size = W * H + clipped_block_size = min(block_size, min(W, H)) + # seed_drop_rate, the gamma parameter + gamma = ( + gamma_scale + * drop_prob + * total_size + / clipped_block_size**2 + / ((W - block_size + 1) * (H - block_size + 1)) + ) + + # Forces the block to be inside the feature map. + w_i, h_i = torch.meshgrid( + torch.arange(W).to(x.device), torch.arange(H).to(x.device) + ) + valid_block = ( + (w_i >= clipped_block_size // 2) & (w_i < W - (clipped_block_size - 1) // 2) + ) & ((h_i >= clipped_block_size // 2) & (h_i < H - (clipped_block_size - 1) // 2)) + valid_block = torch.reshape(valid_block, (1, 1, H, W)).to(dtype=x.dtype) + + if batchwise: + # one mask for whole batch, quite a bit faster + uniform_noise = torch.rand((1, C, H, W), dtype=x.dtype, device=x.device) + else: + uniform_noise = torch.rand_like(x) + block_mask = ((2 - gamma - valid_block + uniform_noise) >= 1).to(dtype=x.dtype) + block_mask = -F.max_pool2d( + -block_mask, + kernel_size=clipped_block_size, # block_size, + stride=1, + padding=clipped_block_size // 2, + ) + + if with_noise: + normal_noise = ( + torch.randn((1, C, H, W), dtype=x.dtype, device=x.device) + if batchwise + else torch.randn_like(x) + ) + if inplace: + x.mul_(block_mask).add_(normal_noise * (1 - block_mask)) + else: + x = x * block_mask + normal_noise * (1 - block_mask) + else: + normalize_scale = ( + block_mask.numel() / block_mask.to(dtype=torch.float32).sum().add(1e-7) + ).to(x.dtype) + if inplace: + x.mul_(block_mask * normalize_scale) + else: + x = x * block_mask * normalize_scale + return x + + +def drop_block_fast_2d( + x: torch.Tensor, + drop_prob: float = 0.1, + block_size: int = 7, + gamma_scale: float = 1.0, + with_noise: bool = False, + inplace: bool = False, +): + """DropBlock. See https://arxiv.org/pdf/1810.12890.pdf + + DropBlock with an experimental gaussian noise option. Simplied from above without concern for valid + block mask at edges. + """ + _, _, H, W = x.shape + total_size = W * H + clipped_block_size = min(block_size, min(W, H)) + gamma = ( + gamma_scale + * drop_prob + * total_size + / clipped_block_size**2 + / ((W - block_size + 1) * (H - block_size + 1)) + ) + + block_mask = torch.empty_like(x).bernoulli_(gamma) + block_mask = F.max_pool2d( + block_mask.to(x.dtype), + kernel_size=clipped_block_size, + stride=1, + padding=clipped_block_size // 2, + ) + + if with_noise: + normal_noise = torch.empty_like(x).normal_() + if inplace: + x.mul_(1.0 - block_mask).add_(normal_noise * block_mask) + else: + x = x * (1.0 - block_mask) + normal_noise * block_mask + else: + block_mask = 1 - block_mask + normalize_scale = ( + block_mask.numel() / block_mask.to(dtype=torch.float32).sum().add(1e-6) + ).to(dtype=x.dtype) + if inplace: + x.mul_(block_mask * normalize_scale) + else: + x = x * block_mask * normalize_scale + return x + + +class DropBlock2d(nn.Module): + """DropBlock. 
See https://arxiv.org/pdf/1810.12890.pdf""" + + def __init__( + self, + drop_prob: float = 0.1, + block_size: int = 7, + gamma_scale: float = 1.0, + with_noise: bool = False, + inplace: bool = False, + batchwise: bool = False, + fast: bool = True, + ): + super(DropBlock2d, self).__init__() + self.drop_prob = drop_prob + self.gamma_scale = gamma_scale + self.block_size = block_size + self.with_noise = with_noise + self.inplace = inplace + self.batchwise = batchwise + self.fast = fast # FIXME finish comparisons of fast vs not + + def forward(self, x): + if not self.training or not self.drop_prob: + return x + if self.fast: + return drop_block_fast_2d( + x, + self.drop_prob, + self.block_size, + self.gamma_scale, + self.with_noise, + self.inplace, + ) + else: + return drop_block_2d( + x, + self.drop_prob, + self.block_size, + self.gamma_scale, + self.with_noise, + self.inplace, + self.batchwise, + ) + + +def drop_path( + x, drop_prob: float = 0.0, training: bool = False, scale_by_keep: bool = True +): + """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks). + + This is the same as the DropConnect impl I created for EfficientNet, etc networks, however, + the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper... + See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for + changing the layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use + 'survival rate' as the argument. + + """ + if drop_prob == 0.0 or not training: + return x + keep_prob = 1 - drop_prob + shape = (x.shape[0],) + (1,) * ( + x.ndim - 1 + ) # work with diff dim tensors, not just 2D ConvNets + random_tensor = x.new_empty(shape).bernoulli_(keep_prob) + if keep_prob > 0.0 and scale_by_keep: + random_tensor.div_(keep_prob) + return x * random_tensor + + +class DropPath(nn.Module): + """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).""" + + def __init__(self, drop_prob: float = 0.0, scale_by_keep: bool = True): + super(DropPath, self).__init__() + self.drop_prob = drop_prob + self.scale_by_keep = scale_by_keep + + def forward(self, x): + return drop_path(x, self.drop_prob, self.training, self.scale_by_keep) + + def extra_repr(self): + return f"drop_prob={round(self.drop_prob,3):0.3f}" diff --git a/ldm_patched/pfn/architecture/timm/helpers.py b/ldm_patched/pfn/architecture/timm/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..cdafee0709165dd992118e3b09b8d26f70ea8a2a --- /dev/null +++ b/ldm_patched/pfn/architecture/timm/helpers.py @@ -0,0 +1,31 @@ +""" Layer/Module Helpers +Hacked together by / Copyright 2020 Ross Wightman +""" +import collections.abc +from itertools import repeat + + +# From PyTorch internals +def _ntuple(n): + def parse(x): + if isinstance(x, collections.abc.Iterable) and not isinstance(x, str): + return x + return tuple(repeat(x, n)) + + return parse + + +to_1tuple = _ntuple(1) +to_2tuple = _ntuple(2) +to_3tuple = _ntuple(3) +to_4tuple = _ntuple(4) +to_ntuple = _ntuple + + +def make_divisible(v, divisor=8, min_value=None, round_limit=0.9): + min_value = min_value or divisor + new_v = max(min_value, int(v + divisor / 2) // divisor * divisor) + # Make sure that round down does not go down by more than 10%. 
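+    # e.g. (illustrative, not from the source): make_divisible(30) == 32, while
+    # make_divisible(20, divisor=16) is bumped from 16 up to 32, since 16 < 0.9 * 20.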
+    if new_v < round_limit * v:
+        new_v += divisor
+    return new_v
diff --git a/ldm_patched/pfn/architecture/timm/weight_init.py b/ldm_patched/pfn/architecture/timm/weight_init.py
new file mode 100644
index 0000000000000000000000000000000000000000..b0169774657d86c1946008e746f2f4f7e833a44c
--- /dev/null
+++ b/ldm_patched/pfn/architecture/timm/weight_init.py
@@ -0,0 +1,128 @@
+import math
+import warnings
+
+import torch
+from torch.nn.init import _calculate_fan_in_and_fan_out
+
+
+def _no_grad_trunc_normal_(tensor, mean, std, a, b):
+    # Cut & paste from PyTorch official master until it's in a few official releases - RW
+    # Method based on https://people.sc.fsu.edu/~jburkardt/presentations/truncated_normal.pdf
+    def norm_cdf(x):
+        # Computes standard normal cumulative distribution function
+        return (1.0 + math.erf(x / math.sqrt(2.0))) / 2.0
+
+    if (mean < a - 2 * std) or (mean > b + 2 * std):
+        warnings.warn(
+            "mean is more than 2 std from [a, b] in nn.init.trunc_normal_. "
+            "The distribution of values may be incorrect.",
+            stacklevel=2,
+        )
+
+    with torch.no_grad():
+        # Values are generated by using a truncated uniform distribution and
+        # then using the inverse CDF for the normal distribution.
+        # Get upper and lower cdf values
+        l = norm_cdf((a - mean) / std)
+        u = norm_cdf((b - mean) / std)
+
+        # Uniformly fill tensor with values from [l, u], then translate to
+        # [2l-1, 2u-1].
+        tensor.uniform_(2 * l - 1, 2 * u - 1)
+
+        # Use inverse cdf transform for normal distribution to get truncated
+        # standard normal
+        tensor.erfinv_()
+
+        # Transform to proper mean, std
+        tensor.mul_(std * math.sqrt(2.0))
+        tensor.add_(mean)
+
+        # Clamp to ensure it's in the proper range
+        tensor.clamp_(min=a, max=b)
+        return tensor
+
+
+def trunc_normal_(
+    tensor: torch.Tensor, mean=0.0, std=1.0, a=-2.0, b=2.0
+) -> torch.Tensor:
+    r"""Fills the input Tensor with values drawn from a truncated
+    normal distribution. The values are effectively drawn from the
+    normal distribution :math:`\mathcal{N}(\text{mean}, \text{std}^2)`
+    with values outside :math:`[a, b]` redrawn until they are within
+    the bounds. The method used for generating the random values works
+    best when :math:`a \leq \text{mean} \leq b`.
+
+    NOTE: this impl is similar to the PyTorch trunc_normal_; the bounds [a, b] are
+    applied while sampling the normal with mean/std applied, so the a and b args
+    should be adjusted to match the range of the mean and std args.
+
+    Args:
+        tensor: an n-dimensional `torch.Tensor`
+        mean: the mean of the normal distribution
+        std: the standard deviation of the normal distribution
+        a: the minimum cutoff value
+        b: the maximum cutoff value
+    Examples:
+        >>> w = torch.empty(3, 5)
+        >>> nn.init.trunc_normal_(w)
+    """
+    return _no_grad_trunc_normal_(tensor, mean, std, a, b)
+
+
+def trunc_normal_tf_(
+    tensor: torch.Tensor, mean=0.0, std=1.0, a=-2.0, b=2.0
+) -> torch.Tensor:
+    r"""Fills the input Tensor with values drawn from a truncated
+    normal distribution. The values are effectively drawn from the
+    normal distribution :math:`\mathcal{N}(\text{mean}, \text{std}^2)`
+    with values outside :math:`[a, b]` redrawn until they are within
+    the bounds. The method used for generating the random values works
+    best when :math:`a \leq \text{mean} \leq b`.
+
+    NOTE: this 'tf' variant behaves closer to the Tensorflow / JAX impl, where the
+    bounds [a, b] are applied when sampling the normal distribution with mean=0, std=1.0
+    and the result is subsequently scaled and shifted by the mean and std args.
+ + Args: + tensor: an n-dimensional `torch.Tensor` + mean: the mean of the normal distribution + std: the standard deviation of the normal distribution + a: the minimum cutoff value + b: the maximum cutoff value + Examples: + >>> w = torch.empty(3, 5) + >>> nn.init.trunc_normal_(w) + """ + _no_grad_trunc_normal_(tensor, 0, 1.0, a, b) + with torch.no_grad(): + tensor.mul_(std).add_(mean) + return tensor + + +def variance_scaling_(tensor, scale=1.0, mode="fan_in", distribution="normal"): + fan_in, fan_out = _calculate_fan_in_and_fan_out(tensor) + if mode == "fan_in": + denom = fan_in + elif mode == "fan_out": + denom = fan_out + elif mode == "fan_avg": + denom = (fan_in + fan_out) / 2 + + variance = scale / denom # type: ignore + + if distribution == "truncated_normal": + # constant is stddev of standard normal truncated to (-2, 2) + trunc_normal_tf_(tensor, std=math.sqrt(variance) / 0.87962566103423978) + elif distribution == "normal": + tensor.normal_(std=math.sqrt(variance)) + elif distribution == "uniform": + bound = math.sqrt(3 * variance) + # pylint: disable=invalid-unary-operand-type + tensor.uniform_(-bound, bound) + else: + raise ValueError(f"invalid distribution {distribution}") + + +def lecun_normal_(tensor): + variance_scaling_(tensor, mode="fan_in", distribution="truncated_normal") diff --git a/ldm_patched/pfn/model_loading.py b/ldm_patched/pfn/model_loading.py new file mode 100644 index 0000000000000000000000000000000000000000..e000871c1bfe66a07dc13b51ad709cb0de092a41 --- /dev/null +++ b/ldm_patched/pfn/model_loading.py @@ -0,0 +1,99 @@ +import logging as logger + +from .architecture.DAT import DAT +from .architecture.face.codeformer import CodeFormer +from .architecture.face.gfpganv1_clean_arch import GFPGANv1Clean +from .architecture.face.restoreformer_arch import RestoreFormer +from .architecture.HAT import HAT +from .architecture.LaMa import LaMa +from .architecture.OmniSR.OmniSR import OmniSR +from .architecture.RRDB import RRDBNet as ESRGAN +from .architecture.SCUNet import SCUNet +from .architecture.SPSR import SPSRNet as SPSR +from .architecture.SRVGG import SRVGGNetCompact as RealESRGANv2 +from .architecture.SwiftSRGAN import Generator as SwiftSRGAN +from .architecture.Swin2SR import Swin2SR +from .architecture.SwinIR import SwinIR +from .types import PyTorchModel + + +class UnsupportedModel(Exception): + pass + + +def load_state_dict(state_dict) -> PyTorchModel: + logger.debug(f"Loading state dict into pytorch model arch") + + state_dict_keys = list(state_dict.keys()) + + if "params_ema" in state_dict_keys: + state_dict = state_dict["params_ema"] + elif "params-ema" in state_dict_keys: + state_dict = state_dict["params-ema"] + elif "params" in state_dict_keys: + state_dict = state_dict["params"] + + state_dict_keys = list(state_dict.keys()) + # SRVGGNet Real-ESRGAN (v2) + if "body.0.weight" in state_dict_keys and "body.1.weight" in state_dict_keys: + model = RealESRGANv2(state_dict) + # SPSR (ESRGAN with lots of extra layers) + elif "f_HR_conv1.0.weight" in state_dict: + model = SPSR(state_dict) + # Swift-SRGAN + elif ( + "model" in state_dict_keys + and "initial.cnn.depthwise.weight" in state_dict["model"].keys() + ): + model = SwiftSRGAN(state_dict) + # SwinIR, Swin2SR, HAT + elif "layers.0.residual_group.blocks.0.norm1.weight" in state_dict_keys: + if ( + "layers.0.residual_group.blocks.0.conv_block.cab.0.weight" + in state_dict_keys + ): + model = HAT(state_dict) + elif "patch_embed.proj.weight" in state_dict_keys: + model = Swin2SR(state_dict) + else: + model = 
SwinIR(state_dict) + # GFPGAN + elif ( + "toRGB.0.weight" in state_dict_keys + and "stylegan_decoder.style_mlp.1.weight" in state_dict_keys + ): + model = GFPGANv1Clean(state_dict) + # RestoreFormer + elif ( + "encoder.conv_in.weight" in state_dict_keys + and "encoder.down.0.block.0.norm1.weight" in state_dict_keys + ): + model = RestoreFormer(state_dict) + elif ( + "encoder.blocks.0.weight" in state_dict_keys + and "quantize.embedding.weight" in state_dict_keys + ): + model = CodeFormer(state_dict) + # LaMa + elif ( + "model.model.1.bn_l.running_mean" in state_dict_keys + or "generator.model.1.bn_l.running_mean" in state_dict_keys + ): + model = LaMa(state_dict) + # Omni-SR + elif "residual_layer.0.residual_layer.0.layer.0.fn.0.weight" in state_dict_keys: + model = OmniSR(state_dict) + # SCUNet + elif "m_head.0.weight" in state_dict_keys and "m_tail.0.weight" in state_dict_keys: + model = SCUNet(state_dict) + # DAT + elif "layers.0.blocks.2.attn.attn_mask_0" in state_dict_keys: + model = DAT(state_dict) + # Regular ESRGAN, "new-arch" ESRGAN, Real-ESRGAN v1 + else: + try: + model = ESRGAN(state_dict) + except: + # pylint: disable=raise-missing-from + raise UnsupportedModel + return model diff --git a/ldm_patched/pfn/types.py b/ldm_patched/pfn/types.py new file mode 100644 index 0000000000000000000000000000000000000000..193333b9e8049d9558ca2ea253d41ee44b0b294b --- /dev/null +++ b/ldm_patched/pfn/types.py @@ -0,0 +1,69 @@ +from typing import Union + +from .architecture.DAT import DAT +from .architecture.face.codeformer import CodeFormer +from .architecture.face.gfpganv1_clean_arch import GFPGANv1Clean +from .architecture.face.restoreformer_arch import RestoreFormer +from .architecture.HAT import HAT +from .architecture.LaMa import LaMa +from .architecture.OmniSR.OmniSR import OmniSR +from .architecture.RRDB import RRDBNet as ESRGAN +from .architecture.SCUNet import SCUNet +from .architecture.SPSR import SPSRNet as SPSR +from .architecture.SRVGG import SRVGGNetCompact as RealESRGANv2 +from .architecture.SwiftSRGAN import Generator as SwiftSRGAN +from .architecture.Swin2SR import Swin2SR +from .architecture.SwinIR import SwinIR + +PyTorchSRModels = ( + RealESRGANv2, + SPSR, + SwiftSRGAN, + ESRGAN, + SwinIR, + Swin2SR, + HAT, + OmniSR, + SCUNet, + DAT, +) +PyTorchSRModel = Union[ + RealESRGANv2, + SPSR, + SwiftSRGAN, + ESRGAN, + SwinIR, + Swin2SR, + HAT, + OmniSR, + SCUNet, + DAT, +] + + +def is_pytorch_sr_model(model: object): + return isinstance(model, PyTorchSRModels) + + +PyTorchFaceModels = (GFPGANv1Clean, RestoreFormer, CodeFormer) +PyTorchFaceModel = Union[GFPGANv1Clean, RestoreFormer, CodeFormer] + + +def is_pytorch_face_model(model: object): + return isinstance(model, PyTorchFaceModels) + + +PyTorchInpaintModels = (LaMa,) +PyTorchInpaintModel = Union[LaMa] + + +def is_pytorch_inpaint_model(model: object): + return isinstance(model, PyTorchInpaintModels) + + +PyTorchModels = (*PyTorchSRModels, *PyTorchFaceModels, *PyTorchInpaintModels) +PyTorchModel = Union[PyTorchSRModel, PyTorchFaceModel, PyTorchInpaintModel] + + +def is_pytorch_model(model: object): + return isinstance(model, PyTorchModels) diff --git a/ldm_patched/t2ia/__pycache__/adapter.cpython-310.pyc b/ldm_patched/t2ia/__pycache__/adapter.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a0097c94d2de4a217893926c104b19c89211515c Binary files /dev/null and b/ldm_patched/t2ia/__pycache__/adapter.cpython-310.pyc differ diff --git a/ldm_patched/t2ia/adapter.py b/ldm_patched/t2ia/adapter.py new file 
mode 100644 index 0000000000000000000000000000000000000000..e9a606b1cd67fd9a955a0ea0a86d1bd5498d85e5 --- /dev/null +++ b/ldm_patched/t2ia/adapter.py @@ -0,0 +1,293 @@ +#taken from https://github.com/TencentARC/T2I-Adapter +import torch +import torch.nn as nn +from collections import OrderedDict + + +def conv_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D convolution module. + """ + if dims == 1: + return nn.Conv1d(*args, **kwargs) + elif dims == 2: + return nn.Conv2d(*args, **kwargs) + elif dims == 3: + return nn.Conv3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +def avg_pool_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D average pooling module. + """ + if dims == 1: + return nn.AvgPool1d(*args, **kwargs) + elif dims == 2: + return nn.AvgPool2d(*args, **kwargs) + elif dims == 3: + return nn.AvgPool3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +class Downsample(nn.Module): + """ + A downsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + downsampling occurs in the inner-two dimensions. + """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + stride = 2 if dims != 3 else (1, 2, 2) + if use_conv: + self.op = conv_nd( + dims, self.channels, self.out_channels, 3, stride=stride, padding=padding + ) + else: + assert self.channels == self.out_channels + self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride) + + def forward(self, x): + assert x.shape[1] == self.channels + if not self.use_conv: + padding = [x.shape[2] % 2, x.shape[3] % 2] + self.op.padding = padding + + x = self.op(x) + return x + + +class ResnetBlock(nn.Module): + def __init__(self, in_c, out_c, down, ksize=3, sk=False, use_conv=True): + super().__init__() + ps = ksize // 2 + if in_c != out_c or sk == False: + self.in_conv = nn.Conv2d(in_c, out_c, ksize, 1, ps) + else: + # print('n_in') + self.in_conv = None + self.block1 = nn.Conv2d(out_c, out_c, 3, 1, 1) + self.act = nn.ReLU() + self.block2 = nn.Conv2d(out_c, out_c, ksize, 1, ps) + if sk == False: + self.skep = nn.Conv2d(in_c, out_c, ksize, 1, ps) + else: + self.skep = None + + self.down = down + if self.down == True: + self.down_opt = Downsample(in_c, use_conv=use_conv) + + def forward(self, x): + if self.down == True: + x = self.down_opt(x) + if self.in_conv is not None: # edit + x = self.in_conv(x) + + h = self.block1(x) + h = self.act(h) + h = self.block2(h) + if self.skep is not None: + return h + self.skep(x) + else: + return h + x + + +class Adapter(nn.Module): + def __init__(self, channels=[320, 640, 1280, 1280], nums_rb=3, cin=64, ksize=3, sk=False, use_conv=True, xl=True): + super(Adapter, self).__init__() + self.unshuffle_amount = 8 + resblock_no_downsample = [] + resblock_downsample = [3, 2, 1] + self.xl = xl + if self.xl: + self.unshuffle_amount = 16 + resblock_no_downsample = [1] + resblock_downsample = [2] + + self.input_channels = cin // (self.unshuffle_amount * self.unshuffle_amount) + self.unshuffle = nn.PixelUnshuffle(self.unshuffle_amount) + self.channels = channels + self.nums_rb = nums_rb + self.body = [] + for i in range(len(channels)): + for j in range(nums_rb): + if (i in resblock_downsample) and (j == 0): + 
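+                    # first block of a stage listed in resblock_downsample: change the
+                    # channel width and halve the spatial resolution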
self.body.append( + ResnetBlock(channels[i - 1], channels[i], down=True, ksize=ksize, sk=sk, use_conv=use_conv)) + elif (i in resblock_no_downsample) and (j == 0): + self.body.append( + ResnetBlock(channels[i - 1], channels[i], down=False, ksize=ksize, sk=sk, use_conv=use_conv)) + else: + self.body.append( + ResnetBlock(channels[i], channels[i], down=False, ksize=ksize, sk=sk, use_conv=use_conv)) + self.body = nn.ModuleList(self.body) + self.conv_in = nn.Conv2d(cin, channels[0], 3, 1, 1) + + def forward(self, x): + # unshuffle + x = self.unshuffle(x) + # extract features + features = [] + x = self.conv_in(x) + for i in range(len(self.channels)): + for j in range(self.nums_rb): + idx = i * self.nums_rb + j + x = self.body[idx](x) + if self.xl: + features.append(None) + if i == 0: + features.append(None) + features.append(None) + if i == 2: + features.append(None) + else: + features.append(None) + features.append(None) + features.append(x) + + return features + + +class LayerNorm(nn.LayerNorm): + """Subclass torch's LayerNorm to handle fp16.""" + + def forward(self, x: torch.Tensor): + orig_type = x.dtype + ret = super().forward(x.type(torch.float32)) + return ret.type(orig_type) + + +class QuickGELU(nn.Module): + + def forward(self, x: torch.Tensor): + return x * torch.sigmoid(1.702 * x) + + +class ResidualAttentionBlock(nn.Module): + + def __init__(self, d_model: int, n_head: int, attn_mask: torch.Tensor = None): + super().__init__() + + self.attn = nn.MultiheadAttention(d_model, n_head) + self.ln_1 = LayerNorm(d_model) + self.mlp = nn.Sequential( + OrderedDict([("c_fc", nn.Linear(d_model, d_model * 4)), ("gelu", QuickGELU()), + ("c_proj", nn.Linear(d_model * 4, d_model))])) + self.ln_2 = LayerNorm(d_model) + self.attn_mask = attn_mask + + def attention(self, x: torch.Tensor): + self.attn_mask = self.attn_mask.to(dtype=x.dtype, device=x.device) if self.attn_mask is not None else None + return self.attn(x, x, x, need_weights=False, attn_mask=self.attn_mask)[0] + + def forward(self, x: torch.Tensor): + x = x + self.attention(self.ln_1(x)) + x = x + self.mlp(self.ln_2(x)) + return x + + +class StyleAdapter(nn.Module): + + def __init__(self, width=1024, context_dim=768, num_head=8, n_layes=3, num_token=4): + super().__init__() + + scale = width ** -0.5 + self.transformer_layes = nn.Sequential(*[ResidualAttentionBlock(width, num_head) for _ in range(n_layes)]) + self.num_token = num_token + self.style_embedding = nn.Parameter(torch.randn(1, num_token, width) * scale) + self.ln_post = LayerNorm(width) + self.ln_pre = LayerNorm(width) + self.proj = nn.Parameter(scale * torch.randn(width, context_dim)) + + def forward(self, x): + # x shape [N, HW+1, C] + style_embedding = self.style_embedding + torch.zeros( + (x.shape[0], self.num_token, self.style_embedding.shape[-1]), device=x.device) + x = torch.cat([x, style_embedding], dim=1) + x = self.ln_pre(x) + x = x.permute(1, 0, 2) # NLD -> LND + x = self.transformer_layes(x) + x = x.permute(1, 0, 2) # LND -> NLD + + x = self.ln_post(x[:, -self.num_token:, :]) + x = x @ self.proj + + return x + + +class ResnetBlock_light(nn.Module): + def __init__(self, in_c): + super().__init__() + self.block1 = nn.Conv2d(in_c, in_c, 3, 1, 1) + self.act = nn.ReLU() + self.block2 = nn.Conv2d(in_c, in_c, 3, 1, 1) + + def forward(self, x): + h = self.block1(x) + h = self.act(h) + h = self.block2(h) + + return h + x + + +class extractor(nn.Module): + def __init__(self, in_c, inter_c, out_c, nums_rb, down=False): + super().__init__() + self.in_conv = nn.Conv2d(in_c, 
inter_c, 1, 1, 0) + self.body = [] + for _ in range(nums_rb): + self.body.append(ResnetBlock_light(inter_c)) + self.body = nn.Sequential(*self.body) + self.out_conv = nn.Conv2d(inter_c, out_c, 1, 1, 0) + self.down = down + if self.down == True: + self.down_opt = Downsample(in_c, use_conv=False) + + def forward(self, x): + if self.down == True: + x = self.down_opt(x) + x = self.in_conv(x) + x = self.body(x) + x = self.out_conv(x) + + return x + + +class Adapter_light(nn.Module): + def __init__(self, channels=[320, 640, 1280, 1280], nums_rb=3, cin=64): + super(Adapter_light, self).__init__() + self.unshuffle_amount = 8 + self.unshuffle = nn.PixelUnshuffle(self.unshuffle_amount) + self.input_channels = cin // (self.unshuffle_amount * self.unshuffle_amount) + self.channels = channels + self.nums_rb = nums_rb + self.body = [] + self.xl = False + + for i in range(len(channels)): + if i == 0: + self.body.append(extractor(in_c=cin, inter_c=channels[i]//4, out_c=channels[i], nums_rb=nums_rb, down=False)) + else: + self.body.append(extractor(in_c=channels[i-1], inter_c=channels[i]//4, out_c=channels[i], nums_rb=nums_rb, down=True)) + self.body = nn.ModuleList(self.body) + + def forward(self, x): + # unshuffle + x = self.unshuffle(x) + # extract features + features = [] + for i in range(len(self.channels)): + x = self.body[i](x) + features.append(None) + features.append(None) + features.append(x) + + return features diff --git a/ldm_patched/taesd/__pycache__/taesd.cpython-310.pyc b/ldm_patched/taesd/__pycache__/taesd.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9971002e26bb07d1a8fafee039af995813caedd Binary files /dev/null and b/ldm_patched/taesd/__pycache__/taesd.cpython-310.pyc differ diff --git a/ldm_patched/taesd/taesd.py b/ldm_patched/taesd/taesd.py new file mode 100644 index 0000000000000000000000000000000000000000..0b4b885f74bd9d7f1cd2560b9f07daa579e634a3 --- /dev/null +++ b/ldm_patched/taesd/taesd.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python3 +""" +Tiny AutoEncoder for Stable Diffusion +(DNN for encoding / decoding SD's latent space) +""" +import torch +import torch.nn as nn + +import ldm_patched.modules.utils +import ldm_patched.modules.ops + +def conv(n_in, n_out, **kwargs): + return ldm_patched.modules.ops.disable_weight_init.Conv2d(n_in, n_out, 3, padding=1, **kwargs) + +class Clamp(nn.Module): + def forward(self, x): + return torch.tanh(x / 3) * 3 + +class Block(nn.Module): + def __init__(self, n_in, n_out): + super().__init__() + self.conv = nn.Sequential(conv(n_in, n_out), nn.ReLU(), conv(n_out, n_out), nn.ReLU(), conv(n_out, n_out)) + self.skip = ldm_patched.modules.ops.disable_weight_init.Conv2d(n_in, n_out, 1, bias=False) if n_in != n_out else nn.Identity() + self.fuse = nn.ReLU() + def forward(self, x): + return self.fuse(self.conv(x) + self.skip(x)) + +def Encoder(): + return nn.Sequential( + conv(3, 64), Block(64, 64), + conv(64, 64, stride=2, bias=False), Block(64, 64), Block(64, 64), Block(64, 64), + conv(64, 64, stride=2, bias=False), Block(64, 64), Block(64, 64), Block(64, 64), + conv(64, 64, stride=2, bias=False), Block(64, 64), Block(64, 64), Block(64, 64), + conv(64, 4), + ) + +def Decoder(): + return nn.Sequential( + Clamp(), conv(4, 64), nn.ReLU(), + Block(64, 64), Block(64, 64), Block(64, 64), nn.Upsample(scale_factor=2), conv(64, 64, bias=False), + Block(64, 64), Block(64, 64), Block(64, 64), nn.Upsample(scale_factor=2), conv(64, 64, bias=False), + Block(64, 64), Block(64, 64), Block(64, 64), nn.Upsample(scale_factor=2), conv(64, 
64, bias=False),
+        Block(64, 64), conv(64, 3),
+    )
+
+class TAESD(nn.Module):
+    latent_magnitude = 3
+    latent_shift = 0.5
+
+    def __init__(self, encoder_path=None, decoder_path=None):
+        """Initialize pretrained TAESD on the given device from the given checkpoints."""
+        super().__init__()
+        self.taesd_encoder = Encoder()
+        self.taesd_decoder = Decoder()
+        self.vae_scale = torch.nn.Parameter(torch.tensor(1.0))
+        if encoder_path is not None:
+            self.taesd_encoder.load_state_dict(ldm_patched.modules.utils.load_torch_file(encoder_path, safe_load=True))
+        if decoder_path is not None:
+            self.taesd_decoder.load_state_dict(ldm_patched.modules.utils.load_torch_file(decoder_path, safe_load=True))
+
+    @staticmethod
+    def scale_latents(x):
+        """raw latents -> [0, 1]"""
+        return x.div(2 * TAESD.latent_magnitude).add(TAESD.latent_shift).clamp(0, 1)
+
+    @staticmethod
+    def unscale_latents(x):
+        """[0, 1] -> raw latents"""
+        return x.sub(TAESD.latent_shift).mul(2 * TAESD.latent_magnitude)
+
+    def decode(self, x):
+        x_sample = self.taesd_decoder(x * self.vae_scale)
+        x_sample = x_sample.sub(0.5).mul(2)
+        return x_sample
+
+    def encode(self, x):
+        return self.taesd_encoder(x * 0.5 + 0.5) / self.vae_scale
diff --git a/ldm_patched/unipc/__pycache__/uni_pc.cpython-310.pyc b/ldm_patched/unipc/__pycache__/uni_pc.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8e25d65c1851c47da6bec4e4b9853995550af6b1
Binary files /dev/null and b/ldm_patched/unipc/__pycache__/uni_pc.cpython-310.pyc differ
diff --git a/ldm_patched/unipc/uni_pc.py b/ldm_patched/unipc/uni_pc.py
new file mode 100644
index 0000000000000000000000000000000000000000..08bf0fc9e6787aec84500b4e3d24a4c8d253b433
--- /dev/null
+++ b/ldm_patched/unipc/uni_pc.py
@@ -0,0 +1,894 @@
+#code taken from: https://github.com/wl-zhao/UniPC and modified
+
+import torch
+import torch.nn.functional as F
+import math
+
+from tqdm.auto import trange, tqdm
+
+
+class NoiseScheduleVP:
+    def __init__(
+        self,
+        schedule='discrete',
+        betas=None,
+        alphas_cumprod=None,
+        continuous_beta_0=0.1,
+        continuous_beta_1=20.,
+    ):
+        """Create a wrapper class for the forward SDE (VP type).
+
+        ***
+        Update: We support discrete-time diffusion models by implementing a piecewise linear interpolation for log_alpha_t.
+        We recommend using schedule='discrete' for discrete-time diffusion models, especially for high-resolution images.
+        ***
+
+        The forward SDE ensures that the conditional distribution q_{t|0}(x_t | x_0) = N ( alpha_t * x_0, sigma_t^2 * I ).
+        We further define lambda_t = log(alpha_t) - log(sigma_t), which is the half-logSNR (described in the DPM-Solver paper).
+        Therefore, we implement the functions for computing alpha_t, sigma_t and lambda_t. For t in [0, T], we have:
+
+            log_alpha_t = self.marginal_log_mean_coeff(t)
+            sigma_t = self.marginal_std(t)
+            lambda_t = self.marginal_lambda(t)
+
+        Moreover, as lambda(t) is an invertible function, we also support its inverse function:
+
+            t = self.inverse_lambda(lambda_t)
+
+        ===============================================================
+
+        We support both discrete-time DPMs (trained on n = 0, 1, ..., N-1) and continuous-time DPMs (trained on t in [t_0, T]).
+
+        1. For discrete-time DPMs:
+
+            For discrete-time DPMs trained on n = 0, 1, ..., N-1, we convert the discrete steps to continuous time steps by:
+                t_i = (i + 1) / N
+            e.g. for N = 1000, we have t_0 = 1e-3 and T = t_{N-1} = 1.
+            We solve the corresponding diffusion ODE from time T = 1 to time t_0 = 1e-3.
+
+            Args:
+                betas: A `torch.Tensor`.
The beta array for the discrete-time DPM. (See the original DDPM paper for details)
+                alphas_cumprod: A `torch.Tensor`. The cumprod alphas for the discrete-time DPM. (See the original DDPM paper for details)
+
+            Note that we always have alphas_cumprod = cumprod(1 - betas). Therefore, we only need to set one of `betas` and `alphas_cumprod`.
+
+            **Important**: Please pay special attention to the arg `alphas_cumprod`:
+                The `alphas_cumprod` is the \hat{alpha_n} array in the notations of DDPM. Specifically, DDPMs assume that
+                    q_{t_n | 0}(x_{t_n} | x_0) = N ( \sqrt{\hat{alpha_n}} * x_0, (1 - \hat{alpha_n}) * I ).
+                Therefore, the notation \hat{alpha_n} is different from the notation alpha_t in DPM-Solver. In fact, we have
+                    alpha_{t_n} = \sqrt{\hat{alpha_n}},
+                and
+                    log(alpha_{t_n}) = 0.5 * log(\hat{alpha_n}).
+
+
+        2. For continuous-time DPMs:
+
+            We support two types of VPSDEs: linear (DDPM) and cosine (improved-DDPM). The hyperparameters for the noise
+            schedule are the default settings in DDPM and improved-DDPM:
+
+            Args:
+                beta_min: A `float` number. The smallest beta for the linear schedule.
+                beta_max: A `float` number. The largest beta for the linear schedule.
+                cosine_s: A `float` number. The hyperparameter in the cosine schedule.
+                cosine_beta_max: A `float` number. The hyperparameter in the cosine schedule.
+                T: A `float` number. The ending time of the forward process.
+
+        ===============================================================
+
+        Args:
+            schedule: A `str`. The noise schedule of the forward SDE. 'discrete' for discrete-time DPMs,
+                    'linear' or 'cosine' for continuous-time DPMs.
+        Returns:
+            A wrapper object of the forward SDE (VP type).
+
+        ===============================================================
+
+        Example:
+
+        # For discrete-time DPMs, given betas (the beta array for n = 0, 1, ..., N - 1):
+        >>> ns = NoiseScheduleVP('discrete', betas=betas)
+
+        # For discrete-time DPMs, given alphas_cumprod (the \hat{alpha_n} array for n = 0, 1, ..., N - 1):
+        >>> ns = NoiseScheduleVP('discrete', alphas_cumprod=alphas_cumprod)
+
+        # For continuous-time DPMs (VPSDE), linear schedule:
+        >>> ns = NoiseScheduleVP('linear', continuous_beta_0=0.1, continuous_beta_1=20.)
+
+        """
+
+        if schedule not in ['discrete', 'linear', 'cosine']:
+            raise ValueError("Unsupported noise schedule {}. The schedule needs to be 'discrete' or 'linear' or 'cosine'".format(schedule))
+
+        self.schedule = schedule
+        if schedule == 'discrete':
+            if betas is not None:
+                log_alphas = 0.5 * torch.log(1 - betas).cumsum(dim=0)
+            else:
+                assert alphas_cumprod is not None
+                log_alphas = 0.5 * torch.log(alphas_cumprod)
+            self.total_N = len(log_alphas)
+            self.T = 1.
+            self.t_array = torch.linspace(0., 1., self.total_N + 1)[1:].reshape((1, -1))
+            self.log_alpha_array = log_alphas.reshape((1, -1,))
+        else:
+            self.total_N = 1000
+            self.beta_0 = continuous_beta_0
+            self.beta_1 = continuous_beta_1
+            self.cosine_s = 0.008
+            self.cosine_beta_max = 999.
+            self.cosine_t_max = math.atan(self.cosine_beta_max * (1. + self.cosine_s) / math.pi) * 2. * (1. + self.cosine_s) / math.pi - self.cosine_s
+            self.cosine_log_alpha_0 = math.log(math.cos(self.cosine_s / (1. + self.cosine_s) * math.pi / 2.))
+            self.schedule = schedule
+            if schedule == 'cosine':
+                # For the cosine schedule, T = 1 will have numerical issues. So we manually set the ending time T.
+                # Note that T = 0.9946 may not be the optimal setting. However, we find it works well.
+                self.T = 0.9946
+            else:
+                self.T = 1.
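A minimal usage sketch of the discrete branch above (editorial illustration; the 10-step linear `betas` are an assumption for the example, not values taken from this repo):

    import torch
    betas = torch.linspace(1e-4, 2e-2, 10)      # toy 10-step linear beta schedule (assumption)
    ns = NoiseScheduleVP('discrete', betas=betas)
    t = torch.tensor([0.5])
    alpha_t = ns.marginal_alpha(t)              # defined just below
    sigma_t = ns.marginal_std(t)
    lambda_t = ns.marginal_lambda(t)            # half-logSNR
    # VP property: alpha_t^2 + sigma_t^2 == 1 by construction
    assert torch.allclose(alpha_t ** 2 + sigma_t ** 2, torch.ones_like(t))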
+
+    def marginal_log_mean_coeff(self, t):
+        """
+        Compute log(alpha_t) of a given continuous-time label t in [0, T].
+        """
+        if self.schedule == 'discrete':
+            return interpolate_fn(t.reshape((-1, 1)), self.t_array.to(t.device), self.log_alpha_array.to(t.device)).reshape((-1))
+        elif self.schedule == 'linear':
+            return -0.25 * t ** 2 * (self.beta_1 - self.beta_0) - 0.5 * t * self.beta_0
+        elif self.schedule == 'cosine':
+            log_alpha_fn = lambda s: torch.log(torch.cos((s + self.cosine_s) / (1. + self.cosine_s) * math.pi / 2.))
+            log_alpha_t = log_alpha_fn(t) - self.cosine_log_alpha_0
+            return log_alpha_t
+
+    def marginal_alpha(self, t):
+        """
+        Compute alpha_t of a given continuous-time label t in [0, T].
+        """
+        return torch.exp(self.marginal_log_mean_coeff(t))
+
+    def marginal_std(self, t):
+        """
+        Compute sigma_t of a given continuous-time label t in [0, T].
+        """
+        return torch.sqrt(1. - torch.exp(2. * self.marginal_log_mean_coeff(t)))
+
+    def marginal_lambda(self, t):
+        """
+        Compute lambda_t = log(alpha_t) - log(sigma_t) of a given continuous-time label t in [0, T].
+        """
+        log_mean_coeff = self.marginal_log_mean_coeff(t)
+        log_std = 0.5 * torch.log(1. - torch.exp(2. * log_mean_coeff))
+        return log_mean_coeff - log_std
+
+    def inverse_lambda(self, lamb):
+        """
+        Compute the continuous-time label t in [0, T] of a given half-logSNR lambda_t.
+        """
+        if self.schedule == 'linear':
+            tmp = 2. * (self.beta_1 - self.beta_0) * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb))
+            Delta = self.beta_0**2 + tmp
+            return tmp / (torch.sqrt(Delta) + self.beta_0) / (self.beta_1 - self.beta_0)
+        elif self.schedule == 'discrete':
+            log_alpha = -0.5 * torch.logaddexp(torch.zeros((1,)).to(lamb.device), -2. * lamb)
+            t = interpolate_fn(log_alpha.reshape((-1, 1)), torch.flip(self.log_alpha_array.to(lamb.device), [1]), torch.flip(self.t_array.to(lamb.device), [1]))
+            return t.reshape((-1,))
+        else:
+            log_alpha = -0.5 * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb))
+            t_fn = lambda log_alpha_t: torch.arccos(torch.exp(log_alpha_t + self.cosine_log_alpha_0)) * 2. * (1. + self.cosine_s) / math.pi - self.cosine_s
+            t = t_fn(log_alpha)
+            return t
+
+
+def model_wrapper(
+    model,
+    noise_schedule,
+    model_type="noise",
+    model_kwargs={},
+    guidance_type="uncond",
+    condition=None,
+    unconditional_condition=None,
+    guidance_scale=1.,
+    classifier_fn=None,
+    classifier_kwargs={},
+):
+    """Create a wrapper function for the noise prediction model.
+
+    DPM-Solver needs to solve the continuous-time diffusion ODEs. For DPMs trained on discrete-time labels, we need to
+    first wrap the model function to a noise prediction model that accepts the continuous time as the input.
+
+    We support four types of the diffusion model by setting `model_type`:
+
+    1. "noise": noise prediction model. (Trained by predicting noise).
+
+    2. "x_start": data prediction model. (Trained by predicting the data x_0 at time 0).
+
+    3. "v": velocity prediction model. (Trained by predicting the velocity).
+        The derivation of the "v" prediction is detailed in Appendix D of [1], and it is used in Imagen-Video [2].
+
+        [1] Salimans, Tim, and Jonathan Ho. "Progressive distillation for fast sampling of diffusion models."
+            arXiv preprint arXiv:2202.00512 (2022).
+        [2] Ho, Jonathan, et al. "Imagen Video: High Definition Video Generation with Diffusion Models."
+            arXiv preprint arXiv:2210.02303 (2022).
+
+    4. "score": marginal score function. (Trained by denoising score matching).
Note that the score function and the noise prediction model follow a simple relationship:
+        ```
+            noise(x_t, t) = -sigma_t * score(x_t, t)
+        ```
+
+    We support three types of guided sampling by DPMs by setting `guidance_type`:
+    1. "uncond": unconditional sampling by DPMs.
+        The input `model` has the following format:
+        ``
+            model(x, t_input, **model_kwargs) -> noise | x_start | v | score
+        ``
+
+    2. "classifier": classifier guidance sampling [3] by DPMs and another classifier.
+        The input `model` has the following format:
+        ``
+            model(x, t_input, **model_kwargs) -> noise | x_start | v | score
+        ``
+
+        The input `classifier_fn` has the following format:
+        ``
+            classifier_fn(x, t_input, cond, **classifier_kwargs) -> logits(x, t_input, cond)
+        ``
+
+        [3] P. Dhariwal and A. Q. Nichol, "Diffusion models beat GANs on image synthesis,"
+            in Advances in Neural Information Processing Systems, vol. 34, 2021, pp. 8780-8794.
+
+    3. "classifier-free": classifier-free guidance sampling by conditional DPMs.
+        The input `model` has the following format:
+        ``
+            model(x, t_input, cond, **model_kwargs) -> noise | x_start | v | score
+        ``
+        And if cond == `unconditional_condition`, the model output is the unconditional DPM output.
+
+        [4] Ho, Jonathan, and Tim Salimans. "Classifier-free diffusion guidance."
+            arXiv preprint arXiv:2207.12598 (2022).
+
+
+    The `t_input` is the time label of the model, which may be discrete-time labels (i.e. 0 to 999)
+    or continuous-time labels (i.e. epsilon to T).
+
+    We wrap the model function to accept only `x` and `t_continuous` as inputs, and output the predicted noise:
+    ``
+        def model_fn(x, t_continuous) -> noise:
+            t_input = get_model_input_time(t_continuous)
+            return noise_pred(model, x, t_input, **model_kwargs)
+    ``
+    where `t_continuous` is the continuous time labels (i.e. epsilon to T). And we use `model_fn` for DPM-Solver.
+
+    ===============================================================
+
+    Args:
+        model: A diffusion model with the corresponding format described above.
+        noise_schedule: A noise schedule object, such as NoiseScheduleVP.
+        model_type: A `str`. The parameterization type of the diffusion model.
+                    "noise" or "x_start" or "v" or "score".
+        model_kwargs: A `dict`. A dict for the other inputs of the model function.
+        guidance_type: A `str`. The type of the guidance for sampling.
+                    "uncond" or "classifier" or "classifier-free".
+        condition: A pytorch tensor. The condition for the guided sampling.
+                    Only used for "classifier" or "classifier-free" guidance type.
+        unconditional_condition: A pytorch tensor. The condition for the unconditional sampling.
+                    Only used for "classifier-free" guidance type.
+        guidance_scale: A `float`. The scale for the guided sampling.
+        classifier_fn: A classifier function. Only used for the classifier guidance.
+        classifier_kwargs: A `dict`. A dict for the other inputs of the classifier function.
+    Returns:
+        A noise prediction model that accepts the noised data and the continuous time as the inputs.
+    """
+
+    def get_model_input_time(t_continuous):
+        """
+        Convert the continuous-time `t_continuous` (in [epsilon, T]) to the model input time.
+        For discrete-time DPMs, we convert `t_continuous` in [1 / N, 1] to `t_input` in [0, 1000 * (N - 1) / N].
+        For continuous-time DPMs, we just use `t_continuous`.
+        """
+        if noise_schedule.schedule == 'discrete':
+            return (t_continuous - 1. / noise_schedule.total_N) * 1000.
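+            # Worked example: with total_N = 1000, t_continuous = 1/1000 maps to 0
+            # and t_continuous = 1.0 maps to (1 - 1/1000) * 1000 = 999, recovering
+            # the familiar 0..999 discrete time labels.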
+        else:
+            return t_continuous
+
+    def noise_pred_fn(x, t_continuous, cond=None):
+        if t_continuous.reshape((-1,)).shape[0] == 1:
+            t_continuous = t_continuous.expand((x.shape[0]))
+        t_input = get_model_input_time(t_continuous)
+        output = model(x, t_input, **model_kwargs)
+        if model_type == "noise":
+            return output
+        elif model_type == "x_start":
+            alpha_t, sigma_t = noise_schedule.marginal_alpha(t_continuous), noise_schedule.marginal_std(t_continuous)
+            dims = x.dim()
+            return (x - expand_dims(alpha_t, dims) * output) / expand_dims(sigma_t, dims)
+        elif model_type == "v":
+            alpha_t, sigma_t = noise_schedule.marginal_alpha(t_continuous), noise_schedule.marginal_std(t_continuous)
+            dims = x.dim()
+            return expand_dims(alpha_t, dims) * output + expand_dims(sigma_t, dims) * x
+        elif model_type == "score":
+            sigma_t = noise_schedule.marginal_std(t_continuous)
+            dims = x.dim()
+            return -expand_dims(sigma_t, dims) * output
+
+    def cond_grad_fn(x, t_input):
+        """
+        Compute the gradient of the classifier, i.e. nabla_{x} log p_t(cond | x_t).
+        """
+        with torch.enable_grad():
+            x_in = x.detach().requires_grad_(True)
+            log_prob = classifier_fn(x_in, t_input, condition, **classifier_kwargs)
+            return torch.autograd.grad(log_prob.sum(), x_in)[0]
+
+    def model_fn(x, t_continuous):
+        """
+        The noise prediction model function that is used for DPM-Solver.
+        """
+        if t_continuous.reshape((-1,)).shape[0] == 1:
+            t_continuous = t_continuous.expand((x.shape[0]))
+        if guidance_type == "uncond":
+            return noise_pred_fn(x, t_continuous)
+        elif guidance_type == "classifier":
+            assert classifier_fn is not None
+            t_input = get_model_input_time(t_continuous)
+            cond_grad = cond_grad_fn(x, t_input)
+            sigma_t = noise_schedule.marginal_std(t_continuous)
+            noise = noise_pred_fn(x, t_continuous)
+            return noise - guidance_scale * expand_dims(sigma_t, dims=cond_grad.dim()) * cond_grad
+        elif guidance_type == "classifier-free":
+            if guidance_scale == 1. or unconditional_condition is None:
+                return noise_pred_fn(x, t_continuous, cond=condition)
+            else:
+                x_in = torch.cat([x] * 2)
+                t_in = torch.cat([t_continuous] * 2)
+                c_in = torch.cat([unconditional_condition, condition])
+                noise_uncond, noise = noise_pred_fn(x_in, t_in, cond=c_in).chunk(2)
+                return noise_uncond + guidance_scale * (noise - noise_uncond)
+
+    assert model_type in ["noise", "x_start", "v"]
+    assert guidance_type in ["uncond", "classifier", "classifier-free"]
+    return model_fn
+
+
+class UniPC:
+    def __init__(
+        self,
+        model_fn,
+        noise_schedule,
+        predict_x0=True,
+        thresholding=False,
+        max_val=1.,
+        variant='bh1',
+        noise_mask=None,
+        masked_image=None,
+        noise=None,
+    ):
+        """Construct a UniPC.
+
+        We support both data_prediction and noise_prediction.
+        """
+        self.model = model_fn
+        self.noise_schedule = noise_schedule
+        self.variant = variant
+        self.predict_x0 = predict_x0
+        self.thresholding = thresholding
+        self.max_val = max_val
+        self.noise_mask = noise_mask
+        self.masked_image = masked_image
+        self.noise = noise
+
+    def dynamic_thresholding_fn(self, x0, t=None):
+        """
+        The dynamic thresholding method.
+        """
+        dims = x0.dim()
+        p = self.dynamic_thresholding_ratio
+        s = torch.quantile(torch.abs(x0).reshape((x0.shape[0], -1)), p, dim=1)
+        s = expand_dims(torch.maximum(s, self.thresholding_max_val * torch.ones_like(s).to(s.device)), dims)
+        x0 = torch.clamp(x0, -s, s) / s
+        return x0
+
+    def noise_prediction_fn(self, x, t):
+        """
+        Return the noise prediction model.
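+        When `self.noise_mask` is set, the model's noise prediction is multiplied
+        by the mask (i.e. zeroed where the mask is 0); `data_prediction_fn` below
+        then re-composites those regions from `self.masked_image`.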
+ """ + if self.noise_mask is not None: + return self.model(x, t) * self.noise_mask + else: + return self.model(x, t) + + def data_prediction_fn(self, x, t): + """ + Return the data prediction model (with thresholding). + """ + noise = self.noise_prediction_fn(x, t) + dims = x.dim() + alpha_t, sigma_t = self.noise_schedule.marginal_alpha(t), self.noise_schedule.marginal_std(t) + x0 = (x - expand_dims(sigma_t, dims) * noise) / expand_dims(alpha_t, dims) + if self.thresholding: + p = 0.995 # A hyperparameter in the paper of "Imagen" [1]. + s = torch.quantile(torch.abs(x0).reshape((x0.shape[0], -1)), p, dim=1) + s = expand_dims(torch.maximum(s, self.max_val * torch.ones_like(s).to(s.device)), dims) + x0 = torch.clamp(x0, -s, s) / s + if self.noise_mask is not None: + x0 = x0 * self.noise_mask + (1. - self.noise_mask) * self.masked_image + return x0 + + def model_fn(self, x, t): + """ + Convert the model to the noise prediction model or the data prediction model. + """ + if self.predict_x0: + return self.data_prediction_fn(x, t) + else: + return self.noise_prediction_fn(x, t) + + def get_time_steps(self, skip_type, t_T, t_0, N, device): + """Compute the intermediate time steps for sampling. + """ + if skip_type == 'logSNR': + lambda_T = self.noise_schedule.marginal_lambda(torch.tensor(t_T).to(device)) + lambda_0 = self.noise_schedule.marginal_lambda(torch.tensor(t_0).to(device)) + logSNR_steps = torch.linspace(lambda_T.cpu().item(), lambda_0.cpu().item(), N + 1).to(device) + return self.noise_schedule.inverse_lambda(logSNR_steps) + elif skip_type == 'time_uniform': + return torch.linspace(t_T, t_0, N + 1).to(device) + elif skip_type == 'time_quadratic': + t_order = 2 + t = torch.linspace(t_T**(1. / t_order), t_0**(1. / t_order), N + 1).pow(t_order).to(device) + return t + else: + raise ValueError("Unsupported skip_type {}, need to be 'logSNR' or 'time_uniform' or 'time_quadratic'".format(skip_type)) + + def get_orders_and_timesteps_for_singlestep_solver(self, steps, order, skip_type, t_T, t_0, device): + """ + Get the order of each step for sampling by the singlestep DPM-Solver. + """ + if order == 3: + K = steps // 3 + 1 + if steps % 3 == 0: + orders = [3,] * (K - 2) + [2, 1] + elif steps % 3 == 1: + orders = [3,] * (K - 1) + [1] + else: + orders = [3,] * (K - 1) + [2] + elif order == 2: + if steps % 2 == 0: + K = steps // 2 + orders = [2,] * K + else: + K = steps // 2 + 1 + orders = [2,] * (K - 1) + [1] + elif order == 1: + K = steps + orders = [1,] * steps + else: + raise ValueError("'order' must be '1' or '2' or '3'.") + if skip_type == 'logSNR': + # To reproduce the results in DPM-Solver paper + timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, K, device) + else: + timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, steps, device)[torch.cumsum(torch.tensor([0,] + orders), 0).to(device)] + return timesteps_outer, orders + + def denoise_to_zero_fn(self, x, s): + """ + Denoise at the final step, which is equivalent to solve the ODE from lambda_s to infty by first-order discretization. 
+ """ + return self.data_prediction_fn(x, s) + + def multistep_uni_pc_update(self, x, model_prev_list, t_prev_list, t, order, **kwargs): + if len(t.shape) == 0: + t = t.view(-1) + if 'bh' in self.variant: + return self.multistep_uni_pc_bh_update(x, model_prev_list, t_prev_list, t, order, **kwargs) + else: + assert self.variant == 'vary_coeff' + return self.multistep_uni_pc_vary_update(x, model_prev_list, t_prev_list, t, order, **kwargs) + + def multistep_uni_pc_vary_update(self, x, model_prev_list, t_prev_list, t, order, use_corrector=True): + print(f'using unified predictor-corrector with order {order} (solver type: vary coeff)') + ns = self.noise_schedule + assert order <= len(model_prev_list) + + # first compute rks + t_prev_0 = t_prev_list[-1] + lambda_prev_0 = ns.marginal_lambda(t_prev_0) + lambda_t = ns.marginal_lambda(t) + model_prev_0 = model_prev_list[-1] + sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t) + log_alpha_t = ns.marginal_log_mean_coeff(t) + alpha_t = torch.exp(log_alpha_t) + + h = lambda_t - lambda_prev_0 + + rks = [] + D1s = [] + for i in range(1, order): + t_prev_i = t_prev_list[-(i + 1)] + model_prev_i = model_prev_list[-(i + 1)] + lambda_prev_i = ns.marginal_lambda(t_prev_i) + rk = (lambda_prev_i - lambda_prev_0) / h + rks.append(rk) + D1s.append((model_prev_i - model_prev_0) / rk) + + rks.append(1.) + rks = torch.tensor(rks, device=x.device) + + K = len(rks) + # build C matrix + C = [] + + col = torch.ones_like(rks) + for k in range(1, K + 1): + C.append(col) + col = col * rks / (k + 1) + C = torch.stack(C, dim=1) + + if len(D1s) > 0: + D1s = torch.stack(D1s, dim=1) # (B, K) + C_inv_p = torch.linalg.inv(C[:-1, :-1]) + A_p = C_inv_p + + if use_corrector: + print('using corrector') + C_inv = torch.linalg.inv(C) + A_c = C_inv + + hh = -h if self.predict_x0 else h + h_phi_1 = torch.expm1(hh) + h_phi_ks = [] + factorial_k = 1 + h_phi_k = h_phi_1 + for k in range(1, K + 2): + h_phi_ks.append(h_phi_k) + h_phi_k = h_phi_k / hh - 1 / factorial_k + factorial_k *= (k + 1) + + model_t = None + if self.predict_x0: + x_t_ = ( + sigma_t / sigma_prev_0 * x + - alpha_t * h_phi_1 * model_prev_0 + ) + # now predictor + x_t = x_t_ + if len(D1s) > 0: + # compute the residuals for predictor + for k in range(K - 1): + x_t = x_t - alpha_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_p[k]) + # now corrector + if use_corrector: + model_t = self.model_fn(x_t, t) + D1_t = (model_t - model_prev_0) + x_t = x_t_ + k = 0 + for k in range(K - 1): + x_t = x_t - alpha_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_c[k][:-1]) + x_t = x_t - alpha_t * h_phi_ks[K] * (D1_t * A_c[k][-1]) + else: + log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t) + x_t_ = ( + (torch.exp(log_alpha_t - log_alpha_prev_0)) * x + - (sigma_t * h_phi_1) * model_prev_0 + ) + # now predictor + x_t = x_t_ + if len(D1s) > 0: + # compute the residuals for predictor + for k in range(K - 1): + x_t = x_t - sigma_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_p[k]) + # now corrector + if use_corrector: + model_t = self.model_fn(x_t, t) + D1_t = (model_t - model_prev_0) + x_t = x_t_ + k = 0 + for k in range(K - 1): + x_t = x_t - sigma_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_c[k][:-1]) + x_t = x_t - sigma_t * h_phi_ks[K] * (D1_t * A_c[k][-1]) + return x_t, model_t + + def multistep_uni_pc_bh_update(self, x, model_prev_list, t_prev_list, t, order, x_t=None, use_corrector=True): + # print(f'using unified 
predictor-corrector with order {order} (solver type: B(h))')
+        ns = self.noise_schedule
+        assert order <= len(model_prev_list)
+        dims = x.dim()
+
+        # first compute rks
+        t_prev_0 = t_prev_list[-1]
+        lambda_prev_0 = ns.marginal_lambda(t_prev_0)
+        lambda_t = ns.marginal_lambda(t)
+        model_prev_0 = model_prev_list[-1]
+        sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t)
+        log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t)
+        alpha_t = torch.exp(log_alpha_t)
+
+        h = lambda_t - lambda_prev_0
+
+        rks = []
+        D1s = []
+        for i in range(1, order):
+            t_prev_i = t_prev_list[-(i + 1)]
+            model_prev_i = model_prev_list[-(i + 1)]
+            lambda_prev_i = ns.marginal_lambda(t_prev_i)
+            rk = ((lambda_prev_i - lambda_prev_0) / h)[0]
+            rks.append(rk)
+            D1s.append((model_prev_i - model_prev_0) / rk)
+
+        rks.append(1.)
+        rks = torch.tensor(rks, device=x.device)
+
+        R = []
+        b = []
+
+        hh = -h[0] if self.predict_x0 else h[0]
+        h_phi_1 = torch.expm1(hh)  # h\phi_1(h) = e^h - 1
+        h_phi_k = h_phi_1 / hh - 1
+
+        factorial_i = 1
+
+        if self.variant == 'bh1':
+            B_h = hh
+        elif self.variant == 'bh2':
+            B_h = torch.expm1(hh)
+        else:
+            raise NotImplementedError()
+
+        for i in range(1, order + 1):
+            R.append(torch.pow(rks, i - 1))
+            b.append(h_phi_k * factorial_i / B_h)
+            factorial_i *= (i + 1)
+            h_phi_k = h_phi_k / hh - 1 / factorial_i
+
+        R = torch.stack(R)
+        b = torch.tensor(b, device=x.device)
+
+        # now predictor
+        use_predictor = len(D1s) > 0 and x_t is None
+        if len(D1s) > 0:
+            D1s = torch.stack(D1s, dim=1)  # (B, K, C, H, W)
+            if x_t is None:
+                # for order 2, we use a simplified version
+                if order == 2:
+                    rhos_p = torch.tensor([0.5], device=b.device)
+                else:
+                    rhos_p = torch.linalg.solve(R[:-1, :-1], b[:-1])
+        else:
+            D1s = None
+
+        if use_corrector:
+            # print('using corrector')
+            # for order 1, we use a simplified version
+            if order == 1:
+                rhos_c = torch.tensor([0.5], device=b.device)
+            else:
+                rhos_c = torch.linalg.solve(R, b)
+
+        model_t = None
+        if self.predict_x0:
+            x_t_ = (
+                expand_dims(sigma_t / sigma_prev_0, dims) * x
+                - expand_dims(alpha_t * h_phi_1, dims) * model_prev_0
+            )
+
+            if x_t is None:
+                if use_predictor:
+                    pred_res = torch.einsum('k,bkchw->bchw', rhos_p, D1s)
+                else:
+                    pred_res = 0
+                x_t = x_t_ - expand_dims(alpha_t * B_h, dims) * pred_res
+
+            if use_corrector:
+                model_t = self.model_fn(x_t, t)
+                if D1s is not None:
+                    corr_res = torch.einsum('k,bkchw->bchw', rhos_c[:-1], D1s)
+                else:
+                    corr_res = 0
+                D1_t = (model_t - model_prev_0)
+                x_t = x_t_ - expand_dims(alpha_t * B_h, dims) * (corr_res + rhos_c[-1] * D1_t)
+        else:
+            x_t_ = (
+                expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x
+                - expand_dims(sigma_t * h_phi_1, dims) * model_prev_0
+            )
+            if x_t is None:
+                if use_predictor:
+                    pred_res = torch.einsum('k,bkchw->bchw', rhos_p, D1s)
+                else:
+                    pred_res = 0
+                x_t = x_t_ - expand_dims(sigma_t * B_h, dims) * pred_res
+
+            if use_corrector:
+                model_t = self.model_fn(x_t, t)
+                if D1s is not None:
+                    corr_res = torch.einsum('k,bkchw->bchw', rhos_c[:-1], D1s)
+                else:
+                    corr_res = 0
+                D1_t = (model_t - model_prev_0)
+                x_t = x_t_ - expand_dims(sigma_t * B_h, dims) * (corr_res + rhos_c[-1] * D1_t)
+        return x_t, model_t
+
+
+    def sample(self, x, timesteps, t_start=None, t_end=None, order=3, skip_type='time_uniform',
+        method='singlestep', lower_order_final=True, denoise_to_zero=False, solver_type='dpm_solver',
+        atol=0.0078, rtol=0.05, corrector=False, callback=None, disable_pbar=False
+    ):
+        # t_0 = 1.
/ self.noise_schedule.total_N if t_end is None else t_end
+        # t_T = self.noise_schedule.T if t_start is None else t_start
+        device = x.device
+        steps = len(timesteps) - 1
+        if method == 'multistep':
+            assert steps >= order
+            # timesteps = self.get_time_steps(skip_type=skip_type, t_T=t_T, t_0=t_0, N=steps, device=device)
+            assert timesteps.shape[0] - 1 == steps
+            # with torch.no_grad():
+            for step_index in trange(steps, disable=disable_pbar):
+                if self.noise_mask is not None:
+                    x = x * self.noise_mask + (1. - self.noise_mask) * (self.masked_image * self.noise_schedule.marginal_alpha(timesteps[step_index]) + self.noise * self.noise_schedule.marginal_std(timesteps[step_index]))
+                if step_index == 0:
+                    vec_t = timesteps[0].expand((x.shape[0]))
+                    model_prev_list = [self.model_fn(x, vec_t)]
+                    t_prev_list = [vec_t]
+                elif step_index < order:
+                    init_order = step_index
+                    # Init the first `order` values by lower order multistep DPM-Solver.
+                    # for init_order in range(1, order):
+                    vec_t = timesteps[init_order].expand(x.shape[0])
+                    x, model_x = self.multistep_uni_pc_update(x, model_prev_list, t_prev_list, vec_t, init_order, use_corrector=True)
+                    if model_x is None:
+                        model_x = self.model_fn(x, vec_t)
+                    model_prev_list.append(model_x)
+                    t_prev_list.append(vec_t)
+                else:
+                    extra_final_step = 0
+                    if step_index == (steps - 1):
+                        extra_final_step = 1
+                    for step in range(step_index, step_index + 1 + extra_final_step):
+                        vec_t = timesteps[step].expand(x.shape[0])
+                        if lower_order_final:
+                            step_order = min(order, steps + 1 - step)
+                        else:
+                            step_order = order
+                        # print('this step order:', step_order)
+                        if step == steps:
+                            # print('do not run corrector at the last step')
+                            use_corrector = False
+                        else:
+                            use_corrector = True
+                        x, model_x = self.multistep_uni_pc_update(x, model_prev_list, t_prev_list, vec_t, step_order, use_corrector=use_corrector)
+                        for i in range(order - 1):
+                            t_prev_list[i] = t_prev_list[i + 1]
+                            model_prev_list[i] = model_prev_list[i + 1]
+                        t_prev_list[-1] = vec_t
+                        # We do not need to evaluate the final model value.
+                        if step < steps:
+                            if model_x is None:
+                                model_x = self.model_fn(x, vec_t)
+                            model_prev_list[-1] = model_x
+                if callback is not None:
+                    callback(step_index, model_prev_list[-1], x, steps)
+        else:
+            raise NotImplementedError()
+        # if denoise_to_zero:
+        #     x = self.denoise_to_zero_fn(x, torch.ones((x.shape[0],)).to(device) * t_0)
+        return x
+
+
+#############################################################
+# other utility functions
+#############################################################
+
+def interpolate_fn(x, xp, yp):
+    """
+    A piecewise linear function y = f(x), using xp and yp as keypoints.
+    We implement f(x) in a differentiable way (i.e. applicable for autograd).
+    The function f(x) is well-defined on the whole x-axis. (For x beyond the bounds of xp, we use the outermost points of xp to define the linear function.)
+
+    Args:
+        x: PyTorch tensor with shape [N, C], where N is the batch size, C is the number of channels (we use C = 1 for DPM-Solver).
+        xp: PyTorch tensor with shape [C, K], where K is the number of keypoints.
+        yp: PyTorch tensor with shape [C, K].
+    Returns:
+        The function values f(x), with shape [N, C].
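+
+    Example (illustrative):
+        >>> xp = torch.tensor([[0., 1.]])
+        >>> yp = torch.tensor([[0., 10.]])
+        >>> interpolate_fn(torch.tensor([[0.25]]), xp, yp)
+        tensor([[2.5000]])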
+ """ + N, K = x.shape[0], xp.shape[1] + all_x = torch.cat([x.unsqueeze(2), xp.unsqueeze(0).repeat((N, 1, 1))], dim=2) + sorted_all_x, x_indices = torch.sort(all_x, dim=2) + x_idx = torch.argmin(x_indices, dim=2) + cand_start_idx = x_idx - 1 + start_idx = torch.where( + torch.eq(x_idx, 0), + torch.tensor(1, device=x.device), + torch.where( + torch.eq(x_idx, K), torch.tensor(K - 2, device=x.device), cand_start_idx, + ), + ) + end_idx = torch.where(torch.eq(start_idx, cand_start_idx), start_idx + 2, start_idx + 1) + start_x = torch.gather(sorted_all_x, dim=2, index=start_idx.unsqueeze(2)).squeeze(2) + end_x = torch.gather(sorted_all_x, dim=2, index=end_idx.unsqueeze(2)).squeeze(2) + start_idx2 = torch.where( + torch.eq(x_idx, 0), + torch.tensor(0, device=x.device), + torch.where( + torch.eq(x_idx, K), torch.tensor(K - 2, device=x.device), cand_start_idx, + ), + ) + y_positions_expanded = yp.unsqueeze(0).expand(N, -1, -1) + start_y = torch.gather(y_positions_expanded, dim=2, index=start_idx2.unsqueeze(2)).squeeze(2) + end_y = torch.gather(y_positions_expanded, dim=2, index=(start_idx2 + 1).unsqueeze(2)).squeeze(2) + cand = start_y + (x - start_x) * (end_y - start_y) / (end_x - start_x) + return cand + + +def expand_dims(v, dims): + """ + Expand the tensor `v` to the dim `dims`. + + Args: + `v`: a PyTorch tensor with shape [N]. + `dim`: a `int`. + Returns: + a PyTorch tensor with shape [N, 1, 1, ..., 1] and the total dimension is `dims`. + """ + return v[(...,) + (None,)*(dims - 1)] + + +class SigmaConvert: + schedule = "" + def marginal_log_mean_coeff(self, sigma): + return 0.5 * torch.log(1 / ((sigma * sigma) + 1)) + + def marginal_alpha(self, t): + return torch.exp(self.marginal_log_mean_coeff(t)) + + def marginal_std(self, t): + return torch.sqrt(1. - torch.exp(2. * self.marginal_log_mean_coeff(t))) + + def marginal_lambda(self, t): + """ + Compute lambda_t = log(alpha_t) - log(sigma_t) of a given continuous-time label t in [0, T]. + """ + log_mean_coeff = self.marginal_log_mean_coeff(t) + log_std = 0.5 * torch.log(1. - torch.exp(2. 
* log_mean_coeff))
+        return log_mean_coeff - log_std
+
+def predict_eps_sigma(model, input, sigma_in, **kwargs):
+    sigma = sigma_in.view(sigma_in.shape[:1] + (1,) * (input.ndim - 1))
+    input = input * ((sigma ** 2 + 1.0) ** 0.5)
+    return (input - model(input, sigma_in, **kwargs)) / sigma
+
+
+def sample_unipc(model, noise, image, sigmas, max_denoise, extra_args=None, callback=None, disable=False, noise_mask=None, variant='bh1'):
+    if sigmas[-1] == 0:
+        # clone (not a view), so that editing the last entry does not mutate the caller's sigmas
+        timesteps = sigmas.clone()
+        timesteps[-1] = 0.001
+    else:
+        timesteps = sigmas.clone()
+    ns = SigmaConvert()
+
+    if image is not None:
+        img = image * ns.marginal_alpha(timesteps[0])
+        if max_denoise:
+            noise_mult = 1.0
+        else:
+            noise_mult = ns.marginal_std(timesteps[0])
+        img += noise * noise_mult
+    else:
+        img = noise
+
+    model_type = "noise"
+
+    model_fn = model_wrapper(
+        lambda input, sigma, **kwargs: predict_eps_sigma(model, input, sigma, **kwargs),
+        ns,
+        model_type=model_type,
+        guidance_type="uncond",
+        model_kwargs=extra_args,
+    )
+
+    order = min(3, len(timesteps) - 2)
+    uni_pc = UniPC(model_fn, ns, predict_x0=True, thresholding=False, noise_mask=noise_mask, masked_image=image, noise=noise, variant=variant)
+    x = uni_pc.sample(img, timesteps=timesteps, skip_type="time_uniform", method="multistep", order=order, lower_order_final=True, callback=callback, disable_pbar=disable)
+    x /= ns.marginal_alpha(timesteps[-1])
+    return x
diff --git a/ldm_patched/utils/latent_visualization.py b/ldm_patched/utils/latent_visualization.py
new file mode 100644
index 0000000000000000000000000000000000000000..a1ad403a1d77fd1effe0ecd153d18ae3059ddae4
--- /dev/null
+++ b/ldm_patched/utils/latent_visualization.py
@@ -0,0 +1,97 @@
+import torch
+from PIL import Image
+import struct
+import numpy as np
+from ldm_patched.modules.args_parser import args, LatentPreviewMethod
+from ldm_patched.taesd.taesd import TAESD
+import ldm_patched.utils.path_utils
+import ldm_patched.modules.utils
+
+MAX_PREVIEW_RESOLUTION = 512
+
+class LatentPreviewer:
+    def decode_latent_to_preview(self, x0):
+        pass
+
+    def decode_latent_to_preview_image(self, preview_format, x0):
+        preview_image = self.decode_latent_to_preview(x0)
+        return ("JPEG", preview_image, MAX_PREVIEW_RESOLUTION)
+
+class TAESDPreviewerImpl(LatentPreviewer):
+    def __init__(self, taesd):
+        self.taesd = taesd
+
+    def decode_latent_to_preview(self, x0):
+        x_sample = self.taesd.decode(x0[:1])[0].detach()
+        x_sample = torch.clamp((x_sample + 1.0) / 2.0, min=0.0, max=1.0)
+        x_sample = 255.
* np.moveaxis(x_sample.cpu().numpy(), 0, 2) + x_sample = x_sample.astype(np.uint8) + + preview_image = Image.fromarray(x_sample) + return preview_image + + +class Latent2RGBPreviewer(LatentPreviewer): + def __init__(self, latent_rgb_factors): + self.latent_rgb_factors = torch.tensor(latent_rgb_factors, device="cpu") + + def decode_latent_to_preview(self, x0): + latent_image = x0[0].permute(1, 2, 0).cpu() @ self.latent_rgb_factors + + latents_ubyte = (((latent_image + 1) / 2) + .clamp(0, 1) # change scale from -1..1 to 0..1 + .mul(0xFF) # to 0..255 + .byte()).cpu() + + return Image.fromarray(latents_ubyte.numpy()) + + +def get_previewer(device, latent_format): + previewer = None + method = args.preview_option + if method != LatentPreviewMethod.NoPreviews: + # TODO previewer methods + taesd_decoder_path = None + if latent_format.taesd_decoder_name is not None: + taesd_decoder_path = next( + (fn for fn in ldm_patched.utils.path_utils.get_filename_list("vae_approx") + if fn.startswith(latent_format.taesd_decoder_name)), + "" + ) + taesd_decoder_path = ldm_patched.utils.path_utils.get_full_path("vae_approx", taesd_decoder_path) + + if method == LatentPreviewMethod.Auto: + method = LatentPreviewMethod.Latent2RGB + if taesd_decoder_path: + method = LatentPreviewMethod.TAESD + + if method == LatentPreviewMethod.TAESD: + if taesd_decoder_path: + taesd = TAESD(None, taesd_decoder_path).to(device) + previewer = TAESDPreviewerImpl(taesd) + else: + print("Warning: TAESD previews enabled, but could not find models/vae_approx/{}".format(latent_format.taesd_decoder_name)) + + if previewer is None: + if latent_format.latent_rgb_factors is not None: + previewer = Latent2RGBPreviewer(latent_format.latent_rgb_factors) + return previewer + +def prepare_callback(model, steps, x0_output_dict=None): + preview_format = "JPEG" + if preview_format not in ["JPEG", "PNG"]: + preview_format = "JPEG" + + previewer = get_previewer(model.load_device, model.model.latent_format) + + pbar = ldm_patched.modules.utils.ProgressBar(steps) + def callback(step, x0, x, total_steps): + if x0_output_dict is not None: + x0_output_dict["x0"] = x0 + + preview_bytes = None + if previewer: + preview_bytes = previewer.decode_latent_to_preview_image(preview_format, x0) + pbar.update_absolute(step + 1, total_steps, preview_bytes) + return callback + diff --git a/ldm_patched/utils/path_utils.py b/ldm_patched/utils/path_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..6cae149b47c5efb959cdcb815aafb28f37307b9c --- /dev/null +++ b/ldm_patched/utils/path_utils.py @@ -0,0 +1,262 @@ +import os +import time + +supported_pt_extensions = set(['.ckpt', '.pt', '.bin', '.pth', '.safetensors']) + +folder_names_and_paths = {} + +base_path = os.getcwd() +models_dir = os.path.join(base_path, "models") +folder_names_and_paths["checkpoints"] = ([os.path.join(models_dir, "checkpoints")], supported_pt_extensions) +folder_names_and_paths["configs"] = ([os.path.join(models_dir, "configs")], [".yaml"]) + +folder_names_and_paths["loras"] = ([os.path.join(models_dir, "loras")], supported_pt_extensions) +folder_names_and_paths["vae"] = ([os.path.join(models_dir, "vae")], supported_pt_extensions) +folder_names_and_paths["clip"] = ([os.path.join(models_dir, "clip")], supported_pt_extensions) +folder_names_and_paths["unet"] = ([os.path.join(models_dir, "unet")], supported_pt_extensions) +folder_names_and_paths["clip_vision"] = ([os.path.join(models_dir, "clip_vision")], supported_pt_extensions) +folder_names_and_paths["style_models"] = 
([os.path.join(models_dir, "style_models")], supported_pt_extensions)
+folder_names_and_paths["embeddings"] = ([os.path.join(models_dir, "embeddings")], supported_pt_extensions)
+folder_names_and_paths["diffusers"] = ([os.path.join(models_dir, "diffusers")], ["folder"])
+folder_names_and_paths["vae_approx"] = ([os.path.join(models_dir, "vae_approx")], supported_pt_extensions)
+
+folder_names_and_paths["controlnet"] = ([os.path.join(models_dir, "controlnet"), os.path.join(models_dir, "t2i_adapter")], supported_pt_extensions)
+folder_names_and_paths["gligen"] = ([os.path.join(models_dir, "gligen")], supported_pt_extensions)
+
+folder_names_and_paths["upscale_models"] = ([os.path.join(models_dir, "upscale_models")], supported_pt_extensions)
+
+folder_names_and_paths["custom_nodes"] = ([os.path.join(base_path, "custom_nodes")], [])
+
+folder_names_and_paths["hypernetworks"] = ([os.path.join(models_dir, "hypernetworks")], supported_pt_extensions)
+
+folder_names_and_paths["photomaker"] = ([os.path.join(models_dir, "photomaker")], supported_pt_extensions)
+
+folder_names_and_paths["classifiers"] = ([os.path.join(models_dir, "classifiers")], {""})
+
+output_directory = os.path.join(os.getcwd(), "output")
+temp_directory = os.path.join(os.getcwd(), "temp")
+input_directory = os.path.join(os.getcwd(), "input")
+user_directory = os.path.join(os.getcwd(), "user")
+
+filename_list_cache = {}
+
+if not os.path.exists(input_directory):
+    try:
+        pass # os.makedirs(input_directory)
+    except:
+        print("Failed to create input directory")
+
+def set_output_directory(output_dir):
+    global output_directory
+    output_directory = output_dir
+
+def set_temp_directory(temp_dir):
+    global temp_directory
+    temp_directory = temp_dir
+
+def set_input_directory(input_dir):
+    global input_directory
+    input_directory = input_dir
+
+def get_output_directory():
+    global output_directory
+    return output_directory
+
+def get_temp_directory():
+    global temp_directory
+    return temp_directory
+
+def get_input_directory():
+    global input_directory
+    return input_directory
+
+
+#NOTE: used in http server so don't put folders that should not be accessed remotely
+def get_directory_by_type(type_name):
+    if type_name == "output":
+        return get_output_directory()
+    if type_name == "temp":
+        return get_temp_directory()
+    if type_name == "input":
+        return get_input_directory()
+    return None
+
+
+# Determine base_dir from the annotation when `name` is in 'filename.ext [annotation]' format;
+# otherwise return the name unchanged and let the caller fall back to a default base_dir.
+def annotated_filepath(name):
+    if name.endswith("[output]"):
+        base_dir = get_output_directory()
+        name = name[:-9]
+    elif name.endswith("[input]"):
+        base_dir = get_input_directory()
+        name = name[:-8]
+    elif name.endswith("[temp]"):
+        base_dir = get_temp_directory()
+        name = name[:-7]
+    else:
+        return name, None
+
+    return name, base_dir
+
+
+def get_annotated_filepath(name, default_dir=None):
+    name, base_dir = annotated_filepath(name)
+
+    if base_dir is None:
+        if default_dir is not None:
+            base_dir = default_dir
+        else:
+            base_dir = get_input_directory()  # fallback path
+
+    return os.path.join(base_dir, name)
+
+
+def exists_annotated_filepath(name):
+    name, base_dir = annotated_filepath(name)
+
+    if base_dir is None:
+        base_dir = get_input_directory()  # fallback path
+
+    filepath = os.path.join(base_dir, name)
+    return os.path.exists(filepath)
+
+
+def add_model_folder_path(folder_name, full_folder_path):
+    global folder_names_and_paths
+    if folder_name in folder_names_and_paths:
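+        # Existing category: append one more search path, keeping its extension filter.
+        # A brand-new category (else-branch) is registered with an empty extension set,
+        # which filter_files_extensions() below treats as "match every file".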
folder_names_and_paths[folder_name][0].append(full_folder_path) + else: + folder_names_and_paths[folder_name] = ([full_folder_path], set()) + +def get_folder_paths(folder_name): + return folder_names_and_paths[folder_name][0][:] + +def recursive_search(directory, excluded_dir_names=None): + if not os.path.isdir(directory): + return [], {} + + if excluded_dir_names is None: + excluded_dir_names = [] + + result = [] + dirs = {} + + # Attempt to add the initial directory to dirs with error handling + try: + dirs[directory] = os.path.getmtime(directory) + except FileNotFoundError: + print(f"Warning: Unable to access {directory}. Skipping this path.") + + for dirpath, subdirs, filenames in os.walk(directory, followlinks=True, topdown=True): + subdirs[:] = [d for d in subdirs if d not in excluded_dir_names] + for file_name in filenames: + relative_path = os.path.relpath(os.path.join(dirpath, file_name), directory) + result.append(relative_path) + + for d in subdirs: + path = os.path.join(dirpath, d) + try: + dirs[path] = os.path.getmtime(path) + except FileNotFoundError: + print(f"Warning: Unable to access {path}. Skipping this path.") + continue + return result, dirs + +def filter_files_extensions(files, extensions): + return sorted(list(filter(lambda a: os.path.splitext(a)[-1].lower() in extensions or len(extensions) == 0, files))) + + + +def get_full_path(folder_name, filename): + global folder_names_and_paths + if folder_name not in folder_names_and_paths: + return None + folders = folder_names_and_paths[folder_name] + filename = os.path.relpath(os.path.join("/", filename), "/") + for x in folders[0]: + full_path = os.path.join(x, filename) + if os.path.isfile(full_path): + return full_path + + return None + +def get_filename_list_(folder_name): + global folder_names_and_paths + output_list = set() + folders = folder_names_and_paths[folder_name] + output_folders = {} + for x in folders[0]: + files, folders_all = recursive_search(x, excluded_dir_names=[".git"]) + output_list.update(filter_files_extensions(files, folders[1])) + output_folders = {**output_folders, **folders_all} + + return (sorted(list(output_list)), output_folders, time.perf_counter()) + +def cached_filename_list_(folder_name): + global filename_list_cache + global folder_names_and_paths + if folder_name not in filename_list_cache: + return None + out = filename_list_cache[folder_name] + + for x in out[1]: + time_modified = out[1][x] + folder = x + if os.path.getmtime(folder) != time_modified: + return None + + folders = folder_names_and_paths[folder_name] + for x in folders[0]: + if os.path.isdir(x): + if x not in out[1]: + return None + + return out + +def get_filename_list(folder_name): + out = cached_filename_list_(folder_name) + if out is None: + out = get_filename_list_(folder_name) + global filename_list_cache + filename_list_cache[folder_name] = out + return list(out[0]) + +def get_save_image_path(filename_prefix, output_dir, image_width=0, image_height=0): + def map_filename(filename): + prefix_len = len(os.path.basename(filename_prefix)) + prefix = filename[:prefix_len + 1] + try: + digits = int(filename[prefix_len + 1:].split('_')[0]) + except: + digits = 0 + return (digits, prefix) + + def compute_vars(input, image_width, image_height): + input = input.replace("%width%", str(image_width)) + input = input.replace("%height%", str(image_height)) + return input + + filename_prefix = compute_vars(filename_prefix, image_width, image_height) + + subfolder = os.path.dirname(os.path.normpath(filename_prefix)) + filename = 
os.path.basename(os.path.normpath(filename_prefix))
+
+    full_output_folder = os.path.join(output_dir, subfolder)
+
+    if os.path.commonpath((output_dir, os.path.abspath(full_output_folder))) != output_dir:
+        err = "**** ERROR: Saving image outside the output folder is not allowed." + \
+              "\n full_output_folder: " + os.path.abspath(full_output_folder) + \
+              "\n output_dir: " + output_dir + \
+              "\n commonpath: " + os.path.commonpath((output_dir, os.path.abspath(full_output_folder)))
+        print(err)
+        raise Exception(err)
+
+    try:
+        counter = max(filter(lambda a: a[1][:-1] == filename and a[1][-1] == "_", map(map_filename, os.listdir(full_output_folder))))[0] + 1
+    except ValueError:
+        counter = 1
+    except FileNotFoundError:
+        os.makedirs(full_output_folder, exist_ok=True)
+        counter = 1
+    return full_output_folder, filename, counter, subfolder, filename_prefix
diff --git a/modules/anisotropic.py b/modules/anisotropic.py
new file mode 100644
index 0000000000000000000000000000000000000000..576822240762b7dfcfb27e49364314ee1cb436d9
--- /dev/null
+++ b/modules/anisotropic.py
@@ -0,0 +1,200 @@
+import torch
+
+
+Tensor = torch.Tensor
+Device = torch.DeviceObjType
+Dtype = torch.Type
+pad =
torch.nn.functional.pad + + +def _compute_zero_padding(kernel_size: tuple[int, int] | int) -> tuple[int, int]: + ky, kx = _unpack_2d_ks(kernel_size) + return (ky - 1) // 2, (kx - 1) // 2 + + +def _unpack_2d_ks(kernel_size: tuple[int, int] | int) -> tuple[int, int]: + if isinstance(kernel_size, int): + ky = kx = kernel_size + else: + assert len(kernel_size) == 2, '2D Kernel size should have a length of 2.' + ky, kx = kernel_size + + ky = int(ky) + kx = int(kx) + return ky, kx + + +def gaussian( + window_size: int, sigma: Tensor | float, *, device: Device | None = None, dtype: Dtype | None = None +) -> Tensor: + + batch_size = sigma.shape[0] + + x = (torch.arange(window_size, device=sigma.device, dtype=sigma.dtype) - window_size // 2).expand(batch_size, -1) + + if window_size % 2 == 0: + x = x + 0.5 + + gauss = torch.exp(-x.pow(2.0) / (2 * sigma.pow(2.0))) + + return gauss / gauss.sum(-1, keepdim=True) + + +def get_gaussian_kernel1d( + kernel_size: int, + sigma: float | Tensor, + force_even: bool = False, + *, + device: Device | None = None, + dtype: Dtype | None = None, +) -> Tensor: + + return gaussian(kernel_size, sigma, device=device, dtype=dtype) + + +def get_gaussian_kernel2d( + kernel_size: tuple[int, int] | int, + sigma: tuple[float, float] | Tensor, + force_even: bool = False, + *, + device: Device | None = None, + dtype: Dtype | None = None, +) -> Tensor: + + sigma = torch.Tensor([[sigma, sigma]]).to(device=device, dtype=dtype) + + ksize_y, ksize_x = _unpack_2d_ks(kernel_size) + sigma_y, sigma_x = sigma[:, 0, None], sigma[:, 1, None] + + kernel_y = get_gaussian_kernel1d(ksize_y, sigma_y, force_even, device=device, dtype=dtype)[..., None] + kernel_x = get_gaussian_kernel1d(ksize_x, sigma_x, force_even, device=device, dtype=dtype)[..., None] + + return kernel_y * kernel_x.view(-1, 1, ksize_x) + + +def _bilateral_blur( + input: Tensor, + guidance: Tensor | None, + kernel_size: tuple[int, int] | int, + sigma_color: float | Tensor, + sigma_space: tuple[float, float] | Tensor, + border_type: str = 'reflect', + color_distance_type: str = 'l1', +) -> Tensor: + + if isinstance(sigma_color, Tensor): + sigma_color = sigma_color.to(device=input.device, dtype=input.dtype).view(-1, 1, 1, 1, 1) + + ky, kx = _unpack_2d_ks(kernel_size) + pad_y, pad_x = _compute_zero_padding(kernel_size) + + padded_input = pad(input, (pad_x, pad_x, pad_y, pad_y), mode=border_type) + unfolded_input = padded_input.unfold(2, ky, 1).unfold(3, kx, 1).flatten(-2) # (B, C, H, W, Ky x Kx) + + if guidance is None: + guidance = input + unfolded_guidance = unfolded_input + else: + padded_guidance = pad(guidance, (pad_x, pad_x, pad_y, pad_y), mode=border_type) + unfolded_guidance = padded_guidance.unfold(2, ky, 1).unfold(3, kx, 1).flatten(-2) # (B, C, H, W, Ky x Kx) + + diff = unfolded_guidance - guidance.unsqueeze(-1) + if color_distance_type == "l1": + color_distance_sq = diff.abs().sum(1, keepdim=True).square() + elif color_distance_type == "l2": + color_distance_sq = diff.square().sum(1, keepdim=True) + else: + raise ValueError("color_distance_type only accepts l1 or l2") + color_kernel = (-0.5 / sigma_color**2 * color_distance_sq).exp() # (B, 1, H, W, Ky x Kx) + + space_kernel = get_gaussian_kernel2d(kernel_size, sigma_space, device=input.device, dtype=input.dtype) + space_kernel = space_kernel.view(-1, 1, 1, 1, kx * ky) + + kernel = space_kernel * color_kernel + out = (unfolded_input * kernel).sum(-1) / kernel.sum(-1) + return out + + +def bilateral_blur( + input: Tensor, + kernel_size: tuple[int, int] | int = (13, 13), + 
sigma_color: float | Tensor = 3.0, + sigma_space: tuple[float, float] | Tensor = 3.0, + border_type: str = 'reflect', + color_distance_type: str = 'l1', +) -> Tensor: + return _bilateral_blur(input, None, kernel_size, sigma_color, sigma_space, border_type, color_distance_type) + + +def adaptive_anisotropic_filter(x, g=None): + if g is None: + g = x + s, m = torch.std_mean(g, dim=(1, 2, 3), keepdim=True) + s = s + 1e-5 + guidance = (g - m) / s + y = _bilateral_blur(x, guidance, + kernel_size=(13, 13), + sigma_color=3.0, + sigma_space=3.0, + border_type='reflect', + color_distance_type='l1') + return y + + +def joint_bilateral_blur( + input: Tensor, + guidance: Tensor, + kernel_size: tuple[int, int] | int, + sigma_color: float | Tensor, + sigma_space: tuple[float, float] | Tensor, + border_type: str = 'reflect', + color_distance_type: str = 'l1', +) -> Tensor: + return _bilateral_blur(input, guidance, kernel_size, sigma_color, sigma_space, border_type, color_distance_type) + + +class _BilateralBlur(torch.nn.Module): + def __init__( + self, + kernel_size: tuple[int, int] | int, + sigma_color: float | Tensor, + sigma_space: tuple[float, float] | Tensor, + border_type: str = 'reflect', + color_distance_type: str = "l1", + ) -> None: + super().__init__() + self.kernel_size = kernel_size + self.sigma_color = sigma_color + self.sigma_space = sigma_space + self.border_type = border_type + self.color_distance_type = color_distance_type + + def __repr__(self) -> str: + return ( + f"{self.__class__.__name__}" + f"(kernel_size={self.kernel_size}, " + f"sigma_color={self.sigma_color}, " + f"sigma_space={self.sigma_space}, " + f"border_type={self.border_type}, " + f"color_distance_type={self.color_distance_type})" + ) + + +class BilateralBlur(_BilateralBlur): + def forward(self, input: Tensor) -> Tensor: + return bilateral_blur( + input, self.kernel_size, self.sigma_color, self.sigma_space, self.border_type, self.color_distance_type + ) + + +class JointBilateralBlur(_BilateralBlur): + def forward(self, input: Tensor, guidance: Tensor) -> Tensor: + return joint_bilateral_blur( + input, + guidance, + self.kernel_size, + self.sigma_color, + self.sigma_space, + self.border_type, + self.color_distance_type, + ) diff --git a/modules/async_worker.py b/modules/async_worker.py new file mode 100644 index 0000000000000000000000000000000000000000..83fc39123f169a3e96a7e0bcf4e9ca92f046f7d0 --- /dev/null +++ b/modules/async_worker.py @@ -0,0 +1,914 @@ +import threading +from modules.patch import PatchSettings, patch_settings, patch_all + +patch_all() + +class AsyncTask: + def __init__(self, args): + self.args = args + self.yields = [] + self.results = [] + self.last_stop = False + self.processing = False + + +async_tasks = [] + + +def worker(): + global async_tasks + + import os + import traceback + import math + import numpy as np + import cv2 + import torch + import time + import shared + import random + import copy + import modules.default_pipeline as pipeline + import modules.core as core + import modules.flags as flags + import modules.config + import modules.patch + import ldm_patched.modules.model_management + import extras.preprocessors as preprocessors + import modules.inpaint_worker as inpaint_worker + import modules.constants as constants + import extras.ip_adapter as ip_adapter + import extras.face_crop + import fooocus_version + import args_manager + + from modules.sdxl_styles import apply_style, apply_wildcards, fooocus_expansion, apply_arrays + from modules.private_logger import log + from 
extras.expansion import safe_str + from modules.util import remove_empty_str, HWC3, resize_image, \ + get_image_shape_ceil, set_image_shape_ceil, get_shape_ceil, resample_image, erode_or_dilate, ordinal_suffix + from modules.upscaler import perform_upscale + from modules.flags import Performance + from modules.meta_parser import get_metadata_parser, MetadataScheme + + pid = os.getpid() + print(f'Started worker with PID {pid}') + + try: + async_gradio_app = shared.gradio_root + flag = f'''App started successfully. Use the app with {str(async_gradio_app.local_url)} or {str(async_gradio_app.server_name)}:{str(async_gradio_app.server_port)}''' + if async_gradio_app.share: + flag += f''' or {async_gradio_app.share_url}''' + print(flag) + except Exception as e: + print(e) + + def progressbar(async_task, number, text): + print(f'[Fooocus] {text}') + async_task.yields.append(['preview', (number, text, None)]) + + def yield_result(async_task, imgs, do_not_show_finished_images=False): + if not isinstance(imgs, list): + imgs = [imgs] + + async_task.results = async_task.results + imgs + + if do_not_show_finished_images: + return + + async_task.yields.append(['results', async_task.results]) + return + + def build_image_wall(async_task): + results = [] + + if len(async_task.results) < 2: + return + + for img in async_task.results: + if isinstance(img, str) and os.path.exists(img): + img = cv2.imread(img) + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) + if not isinstance(img, np.ndarray): + return + if img.ndim != 3: + return + results.append(img) + + H, W, C = results[0].shape + + for img in results: + Hn, Wn, Cn = img.shape + if H != Hn: + return + if W != Wn: + return + if C != Cn: + return + + cols = float(len(results)) ** 0.5 + cols = int(math.ceil(cols)) + rows = float(len(results)) / float(cols) + rows = int(math.ceil(rows)) + + wall = np.zeros(shape=(H * rows, W * cols, C), dtype=np.uint8) + + for y in range(rows): + for x in range(cols): + if y * cols + x < len(results): + img = results[y * cols + x] + wall[y * H:y * H + H, x * W:x * W + W, :] = img + + # must use deep copy otherwise gradio is super laggy. Do not use list.append() . 
+ async_task.results = async_task.results + [wall] + return + + def apply_enabled_loras(loras): + enabled_loras = [] + for lora_enabled, lora_model, lora_weight in loras: + if lora_enabled: + enabled_loras.append([lora_model, lora_weight]) + + return enabled_loras + + @torch.no_grad() + @torch.inference_mode() + def handler(async_task): + execution_start_time = time.perf_counter() + async_task.processing = True + + args = async_task.args + args.reverse() + + prompt = args.pop() + negative_prompt = args.pop() + style_selections = args.pop() + performance_selection = Performance(args.pop()) + aspect_ratios_selection = args.pop() + image_number = args.pop() + output_format = args.pop() + image_seed = args.pop() + sharpness = args.pop() + guidance_scale = args.pop() + base_model_name = args.pop() + refiner_model_name = args.pop() + refiner_switch = args.pop() + loras = apply_enabled_loras([[bool(args.pop()), str(args.pop()), float(args.pop()), ] for _ in range(modules.config.default_max_lora_number)]) + input_image_checkbox = args.pop() + current_tab = args.pop() + uov_method = args.pop() + uov_input_image = args.pop() + outpaint_selections = args.pop() + inpaint_input_image = args.pop() + inpaint_additional_prompt = args.pop() + inpaint_mask_image_upload = args.pop() + + disable_preview = args.pop() + disable_intermediate_results = args.pop() + disable_seed_increment = args.pop() + adm_scaler_positive = args.pop() + adm_scaler_negative = args.pop() + adm_scaler_end = args.pop() + adaptive_cfg = args.pop() + sampler_name = args.pop() + scheduler_name = args.pop() + overwrite_step = args.pop() + overwrite_switch = args.pop() + overwrite_width = args.pop() + overwrite_height = args.pop() + overwrite_vary_strength = args.pop() + overwrite_upscale_strength = args.pop() + mixing_image_prompt_and_vary_upscale = args.pop() + mixing_image_prompt_and_inpaint = args.pop() + debugging_cn_preprocessor = args.pop() + skipping_cn_preprocessor = args.pop() + canny_low_threshold = args.pop() + canny_high_threshold = args.pop() + refiner_swap_method = args.pop() + controlnet_softness = args.pop() + freeu_enabled = args.pop() + freeu_b1 = args.pop() + freeu_b2 = args.pop() + freeu_s1 = args.pop() + freeu_s2 = args.pop() + debugging_inpaint_preprocessor = args.pop() + inpaint_disable_initial_latent = args.pop() + inpaint_engine = args.pop() + inpaint_strength = args.pop() + inpaint_respective_field = args.pop() + inpaint_mask_upload_checkbox = args.pop() + invert_mask_checkbox = args.pop() + inpaint_erode_or_dilate = args.pop() + + save_metadata_to_images = args.pop() if not args_manager.args.disable_metadata else False + metadata_scheme = MetadataScheme(args.pop()) if not args_manager.args.disable_metadata else MetadataScheme.FOOOCUS + + cn_tasks = {x: [] for x in flags.ip_list} + for _ in range(flags.controlnet_image_count): + cn_img = args.pop() + cn_stop = args.pop() + cn_weight = args.pop() + cn_type = args.pop() + if cn_img is not None: + cn_tasks[cn_type].append([cn_img, cn_stop, cn_weight]) + + outpaint_selections = [o.lower() for o in outpaint_selections] + base_model_additional_loras = [] + raw_style_selections = copy.deepcopy(style_selections) + uov_method = uov_method.lower() + + if fooocus_expansion in style_selections: + use_expansion = True + style_selections.remove(fooocus_expansion) + else: + use_expansion = False + + use_style = len(style_selections) > 0 + + if base_model_name == refiner_model_name: + print(f'Refiner disabled because base model and refiner are the same.') + refiner_model_name = 
'None' + + steps = performance_selection.steps() + + if performance_selection == Performance.EXTREME_SPEED: + print('Enter LCM mode.') + progressbar(async_task, 1, 'Downloading LCM components ...') + loras += [(modules.config.downloading_sdxl_lcm_lora(), 1.0)] + + if refiner_model_name != 'None': + print(f'Refiner disabled in LCM mode.') + + refiner_model_name = 'None' + sampler_name = 'lcm' + scheduler_name = 'lcm' + sharpness = 0.0 + guidance_scale = 1.0 + adaptive_cfg = 1.0 + refiner_switch = 1.0 + adm_scaler_positive = 1.0 + adm_scaler_negative = 1.0 + adm_scaler_end = 0.0 + + print(f'[Parameters] Adaptive CFG = {adaptive_cfg}') + print(f'[Parameters] Sharpness = {sharpness}') + print(f'[Parameters] ControlNet Softness = {controlnet_softness}') + print(f'[Parameters] ADM Scale = ' + f'{adm_scaler_positive} : ' + f'{adm_scaler_negative} : ' + f'{adm_scaler_end}') + + patch_settings[pid] = PatchSettings( + sharpness, + adm_scaler_end, + adm_scaler_positive, + adm_scaler_negative, + controlnet_softness, + adaptive_cfg + ) + + cfg_scale = float(guidance_scale) + print(f'[Parameters] CFG = {cfg_scale}') + + initial_latent = None + denoising_strength = 1.0 + tiled = False + + width, height = aspect_ratios_selection.replace('×', ' ').split(' ')[:2] + width, height = int(width), int(height) + + skip_prompt_processing = False + + inpaint_worker.current_task = None + inpaint_parameterized = inpaint_engine != 'None' + inpaint_image = None + inpaint_mask = None + inpaint_head_model_path = None + + use_synthetic_refiner = False + + controlnet_canny_path = None + controlnet_cpds_path = None + clip_vision_path, ip_negative_path, ip_adapter_path, ip_adapter_face_path = None, None, None, None + + seed = int(image_seed) + print(f'[Parameters] Seed = {seed}') + + goals = [] + tasks = [] + + if input_image_checkbox: + if (current_tab == 'uov' or ( + current_tab == 'ip' and mixing_image_prompt_and_vary_upscale)) \ + and uov_method != flags.disabled and uov_input_image is not None: + uov_input_image = HWC3(uov_input_image) + if 'vary' in uov_method: + goals.append('vary') + elif 'upscale' in uov_method: + goals.append('upscale') + if 'fast' in uov_method: + skip_prompt_processing = True + else: + steps = performance_selection.steps_uov() + + progressbar(async_task, 1, 'Downloading upscale models ...') + modules.config.downloading_upscale_model() + if (current_tab == 'inpaint' or ( + current_tab == 'ip' and mixing_image_prompt_and_inpaint)) \ + and isinstance(inpaint_input_image, dict): + inpaint_image = inpaint_input_image['image'] + inpaint_mask = inpaint_input_image['mask'][:, :, 0] + + if inpaint_mask_upload_checkbox: + if isinstance(inpaint_mask_image_upload, np.ndarray): + if inpaint_mask_image_upload.ndim == 3: + H, W, C = inpaint_image.shape + inpaint_mask_image_upload = resample_image(inpaint_mask_image_upload, width=W, height=H) + inpaint_mask_image_upload = np.mean(inpaint_mask_image_upload, axis=2) + inpaint_mask_image_upload = (inpaint_mask_image_upload > 127).astype(np.uint8) * 255 + inpaint_mask = np.maximum(inpaint_mask, inpaint_mask_image_upload) + + if int(inpaint_erode_or_dilate) != 0: + inpaint_mask = erode_or_dilate(inpaint_mask, inpaint_erode_or_dilate) + + if invert_mask_checkbox: + inpaint_mask = 255 - inpaint_mask + + inpaint_image = HWC3(inpaint_image) + if isinstance(inpaint_image, np.ndarray) and isinstance(inpaint_mask, np.ndarray) \ + and (np.any(inpaint_mask > 127) or len(outpaint_selections) > 0): + progressbar(async_task, 1, 'Downloading upscale models ...') + 
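# --- Aside: a quick, runnable check of the aspect-ratio parsing above. The label
# produced by add_ratio in modules/config.py has the form '1152×896 ∣ 9:7', so
# replacing '×' with a space and splitting yields width and height as the first
# two tokens. Illustration only, not Fooocus code:
label = '1152×896 \u2223 9:7'
w, h = label.replace('×', ' ').split(' ')[:2]
print(int(w), int(h))  # 1152 896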
modules.config.downloading_upscale_model() + if inpaint_parameterized: + progressbar(async_task, 1, 'Downloading inpainter ...') + inpaint_head_model_path, inpaint_patch_model_path = modules.config.downloading_inpaint_models( + inpaint_engine) + base_model_additional_loras += [(inpaint_patch_model_path, 1.0)] + print(f'[Inpaint] Current inpaint model is {inpaint_patch_model_path}') + if refiner_model_name == 'None': + use_synthetic_refiner = True + refiner_switch = 0.5 + else: + inpaint_head_model_path, inpaint_patch_model_path = None, None + print(f'[Inpaint] Parameterized inpaint is disabled.') + if inpaint_additional_prompt != '': + if prompt == '': + prompt = inpaint_additional_prompt + else: + prompt = inpaint_additional_prompt + '\n' + prompt + goals.append('inpaint') + if current_tab == 'ip' or \ + mixing_image_prompt_and_vary_upscale or \ + mixing_image_prompt_and_inpaint: + goals.append('cn') + progressbar(async_task, 1, 'Downloading control models ...') + if len(cn_tasks[flags.cn_canny]) > 0: + controlnet_canny_path = modules.config.downloading_controlnet_canny() + if len(cn_tasks[flags.cn_cpds]) > 0: + controlnet_cpds_path = modules.config.downloading_controlnet_cpds() + if len(cn_tasks[flags.cn_ip]) > 0: + clip_vision_path, ip_negative_path, ip_adapter_path = modules.config.downloading_ip_adapters('ip') + if len(cn_tasks[flags.cn_ip_face]) > 0: + clip_vision_path, ip_negative_path, ip_adapter_face_path = modules.config.downloading_ip_adapters( + 'face') + progressbar(async_task, 1, 'Loading control models ...') + + # Load or unload CNs + pipeline.refresh_controlnets([controlnet_canny_path, controlnet_cpds_path]) + ip_adapter.load_ip_adapter(clip_vision_path, ip_negative_path, ip_adapter_path) + ip_adapter.load_ip_adapter(clip_vision_path, ip_negative_path, ip_adapter_face_path) + + if overwrite_step > 0: + steps = overwrite_step + + switch = int(round(steps * refiner_switch)) + + if overwrite_switch > 0: + switch = overwrite_switch + + if overwrite_width > 0: + width = overwrite_width + + if overwrite_height > 0: + height = overwrite_height + + print(f'[Parameters] Sampler = {sampler_name} - {scheduler_name}') + print(f'[Parameters] Steps = {steps} - {switch}') + + progressbar(async_task, 1, 'Initializing ...') + + if not skip_prompt_processing: + + prompts = remove_empty_str([safe_str(p) for p in prompt.splitlines()], default='') + negative_prompts = remove_empty_str([safe_str(p) for p in negative_prompt.splitlines()], default='') + + prompt = prompts[0] + negative_prompt = negative_prompts[0] + + if prompt == '': + # disable expansion when empty since it is not meaningful and influences image prompt + use_expansion = False + + extra_positive_prompts = prompts[1:] if len(prompts) > 1 else [] + extra_negative_prompts = negative_prompts[1:] if len(negative_prompts) > 1 else [] + + progressbar(async_task, 3, 'Loading models ...') + pipeline.refresh_everything(refiner_model_name=refiner_model_name, base_model_name=base_model_name, + loras=loras, base_model_additional_loras=base_model_additional_loras, + use_synthetic_refiner=use_synthetic_refiner) + + progressbar(async_task, 3, 'Processing prompts ...') + tasks = [] + + for i in range(image_number): + if disable_seed_increment: + task_seed = seed + else: + task_seed = (seed + i) % (constants.MAX_SEED + 1) # randint is inclusive, % is not + + task_rng = random.Random(task_seed) # may bind to inpaint noise in the future + task_prompt = apply_wildcards(prompt, task_rng) + task_prompt = apply_arrays(task_prompt, i) + 
task_negative_prompt = apply_wildcards(negative_prompt, task_rng) + task_extra_positive_prompts = [apply_wildcards(pmt, task_rng) for pmt in extra_positive_prompts] + task_extra_negative_prompts = [apply_wildcards(pmt, task_rng) for pmt in extra_negative_prompts] + + positive_basic_workloads = [] + negative_basic_workloads = [] + + if use_style: + for s in style_selections: + p, n = apply_style(s, positive=task_prompt) + positive_basic_workloads = positive_basic_workloads + p + negative_basic_workloads = negative_basic_workloads + n + else: + positive_basic_workloads.append(task_prompt) + + negative_basic_workloads.append(task_negative_prompt) # Always use independent workload for negative. + + positive_basic_workloads = positive_basic_workloads + task_extra_positive_prompts + negative_basic_workloads = negative_basic_workloads + task_extra_negative_prompts + + positive_basic_workloads = remove_empty_str(positive_basic_workloads, default=task_prompt) + negative_basic_workloads = remove_empty_str(negative_basic_workloads, default=task_negative_prompt) + + tasks.append(dict( + task_seed=task_seed, + task_prompt=task_prompt, + task_negative_prompt=task_negative_prompt, + positive=positive_basic_workloads, + negative=negative_basic_workloads, + expansion='', + c=None, + uc=None, + positive_top_k=len(positive_basic_workloads), + negative_top_k=len(negative_basic_workloads), + log_positive_prompt='\n'.join([task_prompt] + task_extra_positive_prompts), + log_negative_prompt='\n'.join([task_negative_prompt] + task_extra_negative_prompts), + )) + + if use_expansion: + for i, t in enumerate(tasks): + progressbar(async_task, 5, f'Preparing Fooocus text #{i + 1} ...') + expansion = pipeline.final_expansion(t['task_prompt'], t['task_seed']) + print(f'[Prompt Expansion] {expansion}') + t['expansion'] = expansion + t['positive'] = copy.deepcopy(t['positive']) + [expansion] # Deep copy. 
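# --- Aside: a minimal, self-contained sketch of the per-image seeding used in the
# task loop above. Each image derives its RNG from (seed + i) % (MAX_SEED + 1), so
# wildcard substitutions are reproducible per image. The expand() helper below is
# hypothetical; the real apply_wildcards in modules/sdxl_styles.py reads its
# choices from wildcard text files.
import random

MAX_SEED = 2 ** 63 - 1  # mirrors modules/constants.py

def expand(prompt, rng, options):
    # replace each __name__ token with a choice drawn from the seeded RNG
    for name, choices in options.items():
        token = f'__{name}__'
        while token in prompt:
            prompt = prompt.replace(token, rng.choice(choices), 1)
    return prompt

for i in range(3):
    rng = random.Random((12345 + i) % (MAX_SEED + 1))  # same arithmetic as the handler
    print(expand('photo of a __color__ car', rng, {'color': ['red', 'blue', 'green']}))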
+ + for i, t in enumerate(tasks): + progressbar(async_task, 7, f'Encoding positive #{i + 1} ...') + t['c'] = pipeline.clip_encode(texts=t['positive'], pool_top_k=t['positive_top_k']) + + for i, t in enumerate(tasks): + if abs(float(cfg_scale) - 1.0) < 1e-4: + t['uc'] = pipeline.clone_cond(t['c']) + else: + progressbar(async_task, 10, f'Encoding negative #{i + 1} ...') + t['uc'] = pipeline.clip_encode(texts=t['negative'], pool_top_k=t['negative_top_k']) + + if len(goals) > 0: + progressbar(async_task, 13, 'Image processing ...') + + if 'vary' in goals: + if 'subtle' in uov_method: + denoising_strength = 0.5 + if 'strong' in uov_method: + denoising_strength = 0.85 + if overwrite_vary_strength > 0: + denoising_strength = overwrite_vary_strength + + shape_ceil = get_image_shape_ceil(uov_input_image) + if shape_ceil < 1024: + print(f'[Vary] Image is resized because it is too small.') + shape_ceil = 1024 + elif shape_ceil > 2048: + print(f'[Vary] Image is resized because it is too big.') + shape_ceil = 2048 + + uov_input_image = set_image_shape_ceil(uov_input_image, shape_ceil) + + initial_pixels = core.numpy_to_pytorch(uov_input_image) + progressbar(async_task, 13, 'VAE encoding ...') + + candidate_vae, _ = pipeline.get_candidate_vae( + steps=steps, + switch=switch, + denoise=denoising_strength, + refiner_swap_method=refiner_swap_method + ) + + initial_latent = core.encode_vae(vae=candidate_vae, pixels=initial_pixels) + B, C, H, W = initial_latent['samples'].shape + width = W * 8 + height = H * 8 + print(f'Final resolution is {str((height, width))}.') + + if 'upscale' in goals: + H, W, C = uov_input_image.shape + progressbar(async_task, 13, f'Upscaling image from {str((H, W))} ...') + uov_input_image = perform_upscale(uov_input_image) + print(f'Image upscaled.') + + if '1.5x' in uov_method: + f = 1.5 + elif '2x' in uov_method: + f = 2.0 + else: + f = 1.0 + + shape_ceil = get_shape_ceil(H * f, W * f) + + if shape_ceil < 1024: + print(f'[Upscale] Image is resized because it is too small.') + uov_input_image = set_image_shape_ceil(uov_input_image, 1024) + shape_ceil = 1024 + else: + uov_input_image = resample_image(uov_input_image, width=W * f, height=H * f) + + image_is_super_large = shape_ceil > 2800 + + if 'fast' in uov_method: + direct_return = True + elif image_is_super_large: + print('Image is too large. Directly returned the SR image. 
' 'At 4K resolution this usually yields better results ' 'than SDXL diffusion.') + direct_return = True + else: + direct_return = False + + if direct_return: + d = [('Upscale (Fast)', 'upscale_fast', '2x')] + uov_input_image_path = log(uov_input_image, d, output_format=output_format) + yield_result(async_task, uov_input_image_path, do_not_show_finished_images=True) + return + + tiled = True + denoising_strength = 0.382 + + if overwrite_upscale_strength > 0: + denoising_strength = overwrite_upscale_strength + + initial_pixels = core.numpy_to_pytorch(uov_input_image) + progressbar(async_task, 13, 'VAE encoding ...') + + candidate_vae, _ = pipeline.get_candidate_vae( + steps=steps, + switch=switch, + denoise=denoising_strength, + refiner_swap_method=refiner_swap_method + ) + + initial_latent = core.encode_vae( + vae=candidate_vae, + pixels=initial_pixels, tiled=True) + B, C, H, W = initial_latent['samples'].shape + width = W * 8 + height = H * 8 + print(f'Final resolution is {str((height, width))}.') + + if 'inpaint' in goals: + if len(outpaint_selections) > 0: + H, W, C = inpaint_image.shape + if 'top' in outpaint_selections: + inpaint_image = np.pad(inpaint_image, [[int(H * 0.3), 0], [0, 0], [0, 0]], mode='edge') + inpaint_mask = np.pad(inpaint_mask, [[int(H * 0.3), 0], [0, 0]], mode='constant', + constant_values=255) + if 'bottom' in outpaint_selections: + inpaint_image = np.pad(inpaint_image, [[0, int(H * 0.3)], [0, 0], [0, 0]], mode='edge') + inpaint_mask = np.pad(inpaint_mask, [[0, int(H * 0.3)], [0, 0]], mode='constant', + constant_values=255) + + H, W, C = inpaint_image.shape + if 'left' in outpaint_selections: + inpaint_image = np.pad(inpaint_image, [[0, 0], [int(H * 0.3), 0], [0, 0]], mode='edge') + inpaint_mask = np.pad(inpaint_mask, [[0, 0], [int(H * 0.3), 0]], mode='constant', + constant_values=255) + if 'right' in outpaint_selections: + inpaint_image = np.pad(inpaint_image, [[0, 0], [0, int(H * 0.3)], [0, 0]], mode='edge') + inpaint_mask = np.pad(inpaint_mask, [[0, 0], [0, int(H * 0.3)]], mode='constant', + constant_values=255) + + inpaint_image = np.ascontiguousarray(inpaint_image.copy()) + inpaint_mask = np.ascontiguousarray(inpaint_mask.copy()) + inpaint_strength = 1.0 + inpaint_respective_field = 1.0 + + denoising_strength = inpaint_strength + + inpaint_worker.current_task = inpaint_worker.InpaintWorker( + image=inpaint_image, + mask=inpaint_mask, + use_fill=denoising_strength > 0.99, + k=inpaint_respective_field + ) + + if debugging_inpaint_preprocessor: + yield_result(async_task, inpaint_worker.current_task.visualize_mask_processing(), + do_not_show_finished_images=True) + return + + progressbar(async_task, 13, 'VAE Inpaint encoding ...') + + inpaint_pixel_fill = core.numpy_to_pytorch(inpaint_worker.current_task.interested_fill) + inpaint_pixel_image = core.numpy_to_pytorch(inpaint_worker.current_task.interested_image) + inpaint_pixel_mask = core.numpy_to_pytorch(inpaint_worker.current_task.interested_mask) + + candidate_vae, candidate_vae_swap = pipeline.get_candidate_vae( + steps=steps, + switch=switch, + denoise=denoising_strength, + refiner_swap_method=refiner_swap_method + ) + + latent_inpaint, latent_mask = core.encode_vae_inpaint( + mask=inpaint_pixel_mask, + vae=candidate_vae, + pixels=inpaint_pixel_image) + + latent_swap = None + if candidate_vae_swap is not None: + progressbar(async_task, 13, 'VAE SD15 encoding ...') + latent_swap = core.encode_vae( + vae=candidate_vae_swap, + pixels=inpaint_pixel_fill)['samples'] + + 
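# --- Aside: a toy NumPy check of the outpaint padding scheme above. Each selected
# side grows the canvas by 30%: image pixels are extended with 'edge' padding,
# while the mask gains a constant-255 strip so the sampler repaints the whole new
# region. Toy shapes only; an illustration, not Fooocus code.
import numpy as np

img = np.zeros((100, 80, 3), dtype=np.uint8)   # H=100, W=80, C=3
mask = np.zeros((100, 80), dtype=np.uint8)

pad_top = int(img.shape[0] * 0.3)              # same 0.3 growth factor as above
img = np.pad(img, [[pad_top, 0], [0, 0], [0, 0]], mode='edge')
mask = np.pad(mask, [[pad_top, 0], [0, 0]], mode='constant', constant_values=255)

print(img.shape, mask.shape)                   # (130, 80, 3) (130, 80)
print(mask[:pad_top].min())                    # 255 -> the new strip is fully masked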
progressbar(async_task, 13, 'VAE encoding ...') + latent_fill = core.encode_vae( + vae=candidate_vae, + pixels=inpaint_pixel_fill)['samples'] + + inpaint_worker.current_task.load_latent( + latent_fill=latent_fill, latent_mask=latent_mask, latent_swap=latent_swap) + + if inpaint_parameterized: + pipeline.final_unet = inpaint_worker.current_task.patch( + inpaint_head_model_path=inpaint_head_model_path, + inpaint_latent=latent_inpaint, + inpaint_latent_mask=latent_mask, + model=pipeline.final_unet + ) + + if not inpaint_disable_initial_latent: + initial_latent = {'samples': latent_fill} + + B, C, H, W = latent_fill.shape + height, width = H * 8, W * 8 + final_height, final_width = inpaint_worker.current_task.image.shape[:2] + print(f'Final resolution is {str((final_height, final_width))}, latent is {str((height, width))}.') + + if 'cn' in goals: + for task in cn_tasks[flags.cn_canny]: + cn_img, cn_stop, cn_weight = task + cn_img = resize_image(HWC3(cn_img), width=width, height=height) + + if not skipping_cn_preprocessor: + cn_img = preprocessors.canny_pyramid(cn_img, canny_low_threshold, canny_high_threshold) + + cn_img = HWC3(cn_img) + task[0] = core.numpy_to_pytorch(cn_img) + if debugging_cn_preprocessor: + yield_result(async_task, cn_img, do_not_show_finished_images=True) + return + for task in cn_tasks[flags.cn_cpds]: + cn_img, cn_stop, cn_weight = task + cn_img = resize_image(HWC3(cn_img), width=width, height=height) + + if not skipping_cn_preprocessor: + cn_img = preprocessors.cpds(cn_img) + + cn_img = HWC3(cn_img) + task[0] = core.numpy_to_pytorch(cn_img) + if debugging_cn_preprocessor: + yield_result(async_task, cn_img, do_not_show_finished_images=True) + return + for task in cn_tasks[flags.cn_ip]: + cn_img, cn_stop, cn_weight = task + cn_img = HWC3(cn_img) + + # https://github.com/tencent-ailab/IP-Adapter/blob/d580c50a291566bbf9fc7ac0f760506607297e6d/README.md?plain=1#L75 + cn_img = resize_image(cn_img, width=224, height=224, resize_mode=0) + + task[0] = ip_adapter.preprocess(cn_img, ip_adapter_path=ip_adapter_path) + if debugging_cn_preprocessor: + yield_result(async_task, cn_img, do_not_show_finished_images=True) + return + for task in cn_tasks[flags.cn_ip_face]: + cn_img, cn_stop, cn_weight = task + cn_img = HWC3(cn_img) + + if not skipping_cn_preprocessor: + cn_img = extras.face_crop.crop_image(cn_img) + + # https://github.com/tencent-ailab/IP-Adapter/blob/d580c50a291566bbf9fc7ac0f760506607297e6d/README.md?plain=1#L75 + cn_img = resize_image(cn_img, width=224, height=224, resize_mode=0) + + task[0] = ip_adapter.preprocess(cn_img, ip_adapter_path=ip_adapter_face_path) + if debugging_cn_preprocessor: + yield_result(async_task, cn_img, do_not_show_finished_images=True) + return + + all_ip_tasks = cn_tasks[flags.cn_ip] + cn_tasks[flags.cn_ip_face] + + if len(all_ip_tasks) > 0: + pipeline.final_unet = ip_adapter.patch_model(pipeline.final_unet, all_ip_tasks) + + if freeu_enabled: + print(f'FreeU is enabled!') + pipeline.final_unet = core.apply_freeu( + pipeline.final_unet, + freeu_b1, + freeu_b2, + freeu_s1, + freeu_s2 + ) + + all_steps = steps * image_number + + print(f'[Parameters] Denoising Strength = {denoising_strength}') + + if isinstance(initial_latent, dict) and 'samples' in initial_latent: + log_shape = initial_latent['samples'].shape + else: + log_shape = f'Image Space {(height, width)}' + + print(f'[Parameters] Initial Latent shape: {log_shape}') + + preparation_time = time.perf_counter() - execution_start_time + print(f'Preparation time: {preparation_time:.2f} seconds') + 
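# --- Aside: the repeated `width = W * 8` / `height = H * 8` bookkeeping above
# reflects the SD/SDXL VAE's 8x spatial downsampling: a (B, 4, H, W) latent
# decodes to an (8H, 8W) image. Pure arithmetic, no model required:
def latent_to_pixels(latent_shape):
    b, c, h, w = latent_shape
    return h * 8, w * 8  # (height, width) in pixel space

print(latent_to_pixels((1, 4, 112, 144)))  # (896, 1152), i.e. the 1152*896 preset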
+ final_sampler_name = sampler_name + final_scheduler_name = scheduler_name + + if scheduler_name == 'lcm': + final_scheduler_name = 'sgm_uniform' + if pipeline.final_unet is not None: + pipeline.final_unet = core.opModelSamplingDiscrete.patch( + pipeline.final_unet, + sampling='lcm', + zsnr=False)[0] + if pipeline.final_refiner_unet is not None: + pipeline.final_refiner_unet = core.opModelSamplingDiscrete.patch( + pipeline.final_refiner_unet, + sampling='lcm', + zsnr=False)[0] + print('Using lcm scheduler.') + + async_task.yields.append(['preview', (13, 'Moving model to GPU ...', None)]) + + def callback(step, x0, x, total_steps, y): + done_steps = current_task_id * steps + step + async_task.yields.append(['preview', ( + int(15.0 + 85.0 * float(done_steps) / float(all_steps)), + f'Step {step}/{total_steps} in the {current_task_id + 1}{ordinal_suffix(current_task_id + 1)} Sampling', y)]) + + for current_task_id, task in enumerate(tasks): + execution_start_time = time.perf_counter() + + try: + if async_task.last_stop is not False: + ldm_patched.modules.model_management.interrupt_current_processing() + positive_cond, negative_cond = task['c'], task['uc'] + + if 'cn' in goals: + for cn_flag, cn_path in [ + (flags.cn_canny, controlnet_canny_path), + (flags.cn_cpds, controlnet_cpds_path) + ]: + for cn_img, cn_stop, cn_weight in cn_tasks[cn_flag]: + positive_cond, negative_cond = core.apply_controlnet( + positive_cond, negative_cond, + pipeline.loaded_ControlNets[cn_path], cn_img, cn_weight, 0, cn_stop) + + imgs = pipeline.process_diffusion( + positive_cond=positive_cond, + negative_cond=negative_cond, + steps=steps, + switch=switch, + width=width, + height=height, + image_seed=task['task_seed'], + callback=callback, + sampler_name=final_sampler_name, + scheduler_name=final_scheduler_name, + latent=initial_latent, + denoise=denoising_strength, + tiled=tiled, + cfg_scale=cfg_scale, + refiner_swap_method=refiner_swap_method, + disable_preview=disable_preview + ) + + del task['c'], task['uc'], positive_cond, negative_cond # Save memory + + if inpaint_worker.current_task is not None: + imgs = [inpaint_worker.current_task.post_process(x) for x in imgs] + + img_paths = [] + for x in imgs: + d = [('Prompt', 'prompt', task['log_positive_prompt']), + ('Negative Prompt', 'negative_prompt', task['log_negative_prompt']), + ('Fooocus V2 Expansion', 'prompt_expansion', task['expansion']), + ('Styles', 'styles', str(raw_style_selections)), + ('Performance', 'performance', performance_selection.value)] + + if performance_selection.steps() != steps: + d.append(('Steps', 'steps', steps)) + + d += [('Resolution', 'resolution', str((width, height))), + ('Guidance Scale', 'guidance_scale', guidance_scale), + ('Sharpness', 'sharpness', sharpness), + ('ADM Guidance', 'adm_guidance', str(( + modules.patch.patch_settings[pid].positive_adm_scale, + modules.patch.patch_settings[pid].negative_adm_scale, + modules.patch.patch_settings[pid].adm_scaler_end))), + ('Base Model', 'base_model', base_model_name), + ('Refiner Model', 'refiner_model', refiner_model_name), + ('Refiner Switch', 'refiner_switch', refiner_switch)] + + if refiner_model_name != 'None': + if overwrite_switch > 0: + d.append(('Overwrite Switch', 'overwrite_switch', overwrite_switch)) + if refiner_swap_method != flags.refiner_swap_method: + d.append(('Refiner Swap Method', 'refiner_swap_method', refiner_swap_method)) + if modules.patch.patch_settings[pid].adaptive_cfg != modules.config.default_cfg_tsnr: + d.append(('CFG Mimicking from TSNR', 'adaptive_cfg', 
modules.patch.patch_settings[pid].adaptive_cfg)) + + d.append(('Sampler', 'sampler', sampler_name)) + d.append(('Scheduler', 'scheduler', scheduler_name)) + d.append(('Seed', 'seed', task['task_seed'])) + + if freeu_enabled: + d.append(('FreeU', 'freeu', str((freeu_b1, freeu_b2, freeu_s1, freeu_s2)))) + + for li, (n, w) in enumerate(loras): + if n != 'None': + d.append((f'LoRA {li + 1}', f'lora_combined_{li + 1}', f'{n} : {w}')) + + metadata_parser = None + if save_metadata_to_images: + metadata_parser = modules.meta_parser.get_metadata_parser(metadata_scheme) + metadata_parser.set_data(task['log_positive_prompt'], task['positive'], + task['log_negative_prompt'], task['negative'], + steps, base_model_name, refiner_model_name, loras) + d.append(('Metadata Scheme', 'metadata_scheme', metadata_scheme.value if save_metadata_to_images else save_metadata_to_images)) + d.append(('Version', 'version', 'Fooocus v' + fooocus_version.version)) + img_paths.append(log(x, d, metadata_parser, output_format)) + + yield_result(async_task, img_paths, do_not_show_finished_images=len(tasks) == 1 or disable_intermediate_results) + except ldm_patched.modules.model_management.InterruptProcessingException as e: + if async_task.last_stop == 'skip': + print('User skipped') + async_task.last_stop = False + continue + else: + print('User stopped') + break + + execution_time = time.perf_counter() - execution_start_time + print(f'Generating and saving time: {execution_time:.2f} seconds') + async_task.processing = False + return + + while True: + time.sleep(0.01) + if len(async_tasks) > 0: + task = async_tasks.pop(0) + generate_image_grid = task.args.pop(0) + + try: + handler(task) + if generate_image_grid: + build_image_wall(task) + task.yields.append(['finish', task.results]) + pipeline.prepare_text_encoder(async_call=True) + except: + traceback.print_exc() + task.yields.append(['finish', task.results]) + finally: + if pid in modules.patch.patch_settings: + del modules.patch.patch_settings[pid] + pass + + +threading.Thread(target=worker, daemon=True).start() diff --git a/modules/auth.py b/modules/auth.py new file mode 100644 index 0000000000000000000000000000000000000000..3ba111424523c19174f8b741b3bbac7b43b7bb6c --- /dev/null +++ b/modules/auth.py @@ -0,0 +1,41 @@ +import json +import hashlib +import modules.constants as constants + +from os.path import exists + + +def auth_list_to_dict(auth_list): + auth_dict = {} + for auth_data in auth_list: + if 'user' in auth_data: + if 'hash' in auth_data: + auth_dict |= {auth_data['user']: auth_data['hash']} + elif 'pass' in auth_data: + auth_dict |= {auth_data['user']: hashlib.sha256(bytes(auth_data['pass'], encoding='utf-8')).hexdigest()} + return auth_dict + + +def load_auth_data(filename=None): + auth_dict = None + if filename != None and exists(filename): + with open(filename, encoding='utf-8') as auth_file: + try: + auth_obj = json.load(auth_file) + if isinstance(auth_obj, list) and len(auth_obj) > 0: + auth_dict = auth_list_to_dict(auth_obj) + except Exception as e: + print('load_auth_data, e: ' + str(e)) + return auth_dict + + +auth_dict = load_auth_data(constants.AUTH_FILENAME) + +auth_enabled = auth_dict != None + + +def check_auth(user, password): + if user not in auth_dict: + return False + else: + return hashlib.sha256(bytes(password, encoding='utf-8')).hexdigest() == auth_dict[user] diff --git a/modules/config.py b/modules/config.py new file mode 100644 index 0000000000000000000000000000000000000000..60ee1868fdda85ab63f0a8533b6999c0e1590d13 --- /dev/null +++ 
b/modules/config.py @@ -0,0 +1,607 @@ +import os +import json +import math +import numbers +import args_manager +import modules.flags +import modules.sdxl_styles + +from modules.model_loader import load_file_from_url +from modules.util import get_files_from_folder, makedirs_with_log +from modules.flags import Performance, MetadataScheme + +def get_config_path(key, default_value): + env = os.getenv(key) + if env is not None and isinstance(env, str): + print(f"Environment: {key} = {env}") + return env + else: + return os.path.abspath(default_value) + +config_path = get_config_path('config_path', "./config.txt") +config_example_path = get_config_path('config_example_path', "config_modification_tutorial.txt") +config_dict = {} +always_save_keys = [] +visited_keys = [] + +try: + with open(os.path.abspath(f'./presets/default.json'), "r", encoding="utf-8") as json_file: + config_dict.update(json.load(json_file)) +except Exception as e: + print(f'Load default preset failed.') + print(e) + +try: + if os.path.exists(config_path): + with open(config_path, "r", encoding="utf-8") as json_file: + config_dict.update(json.load(json_file)) + always_save_keys = list(config_dict.keys()) +except Exception as e: + print(f'Failed to load config file "{config_path}" . The reason is: {str(e)}') + print('Please make sure that:') + print(f'1. The file "{config_path}" is a valid text file, and you have access to read it.') + print('2. Use "\\\\" instead of "\\" when describing paths.') + print('3. There is no "," before the last "}".') + print('4. All key/value formats are correct.') + + +def try_load_deprecated_user_path_config(): + global config_dict + + if not os.path.exists('user_path_config.txt'): + return + + try: + deprecated_config_dict = json.load(open('user_path_config.txt', "r", encoding="utf-8")) + + def replace_config(old_key, new_key): + if old_key in deprecated_config_dict: + config_dict[new_key] = deprecated_config_dict[old_key] + del deprecated_config_dict[old_key] + + replace_config('modelfile_path', 'path_checkpoints') + replace_config('lorafile_path', 'path_loras') + replace_config('embeddings_path', 'path_embeddings') + replace_config('vae_approx_path', 'path_vae_approx') + replace_config('upscale_models_path', 'path_upscale_models') + replace_config('inpaint_models_path', 'path_inpaint') + replace_config('controlnet_models_path', 'path_controlnet') + replace_config('clip_vision_models_path', 'path_clip_vision') + replace_config('fooocus_expansion_path', 'path_fooocus_expansion') + replace_config('temp_outputs_path', 'path_outputs') + + if deprecated_config_dict.get("default_model", None) == 'juggernautXL_version6Rundiffusion.safetensors': + os.replace('user_path_config.txt', 'user_path_config-deprecated.txt') + print('Config updated successfully in silence. ' + 'A backup of previous config is written to "user_path_config-deprecated.txt".') + return + + if input("Newer models and configs are available. " + "Download and update files? [Y/n]:") in ['n', 'N', 'No', 'no', 'NO']: + config_dict.update(deprecated_config_dict) + print('Loading using deprecated old models and deprecated old configs.') + return + else: + os.replace('user_path_config.txt', 'user_path_config-deprecated.txt') + print('Config updated successfully by user. 
' + 'A backup of previous config is written to "user_path_config-deprecated.txt".') + return + except Exception as e: + print('Processing deprecated config failed') + print(e) + return + + +try_load_deprecated_user_path_config() + +preset = args_manager.args.preset + +if isinstance(preset, str): + preset_path = os.path.abspath(f'./presets/{preset}.json') + try: + if os.path.exists(preset_path): + with open(preset_path, "r", encoding="utf-8") as json_file: + config_dict.update(json.load(json_file)) + print(f'Loaded preset: {preset_path}') + else: + raise FileNotFoundError + except Exception as e: + print(f'Load preset [{preset_path}] failed') + print(e) + + +def get_path_output() -> str: + """ + Checking output path argument and overriding default path. + """ + global config_dict + path_output = get_dir_or_set_default('path_outputs', '../outputs/', make_directory=True) + if args_manager.args.output_path: + print(f'[CONFIG] Overriding config value path_outputs with {args_manager.args.output_path}') + config_dict['path_outputs'] = path_output = args_manager.args.output_path + return path_output + + +def get_dir_or_set_default(key, default_value, as_array=False, make_directory=False): + global config_dict, visited_keys, always_save_keys + + if key not in visited_keys: + visited_keys.append(key) + + if key not in always_save_keys: + always_save_keys.append(key) + + v = os.getenv(key) + if v is not None: + print(f"Environment: {key} = {v}") + config_dict[key] = v + else: + v = config_dict.get(key, None) + + if isinstance(v, str): + if make_directory: + makedirs_with_log(v) + if os.path.exists(v) and os.path.isdir(v): + return v if not as_array else [v] + elif isinstance(v, list): + if make_directory: + for d in v: + makedirs_with_log(d) + if all([os.path.exists(d) and os.path.isdir(d) for d in v]): + return v + + if v is not None: + print(f'Failed to load config key: {json.dumps({key:v})} is invalid or does not exist; will use {json.dumps({key:default_value})} instead.') + if isinstance(default_value, list): + dp = [] + for path in default_value: + abs_path = os.path.abspath(os.path.join(os.path.dirname(__file__), path)) + dp.append(abs_path) + os.makedirs(abs_path, exist_ok=True) + else: + dp = os.path.abspath(os.path.join(os.path.dirname(__file__), default_value)) + os.makedirs(dp, exist_ok=True) + if as_array: + dp = [dp] + config_dict[key] = dp + return dp + + +paths_checkpoints = get_dir_or_set_default('path_checkpoints', ['../models/checkpoints/'], True) +paths_loras = get_dir_or_set_default('path_loras', ['../models/loras/'], True) +path_embeddings = get_dir_or_set_default('path_embeddings', '../models/embeddings/') +path_vae_approx = get_dir_or_set_default('path_vae_approx', '../models/vae_approx/') +path_upscale_models = get_dir_or_set_default('path_upscale_models', '../models/upscale_models/') +path_inpaint = get_dir_or_set_default('path_inpaint', '../models/inpaint/') +path_controlnet = get_dir_or_set_default('path_controlnet', '../models/controlnet/') +path_clip_vision = get_dir_or_set_default('path_clip_vision', '../models/clip_vision/') +path_fooocus_expansion = get_dir_or_set_default('path_fooocus_expansion', '../models/prompt_expansion/fooocus_expansion') +path_outputs = get_path_output() + +def get_config_item_or_set_default(key, default_value, validator, disable_empty_as_none=False): + global config_dict, visited_keys + + if key not in visited_keys: + visited_keys.append(key) + + v = os.getenv(key) + if v is not None: + print(f"Environment: {key} = {v}") + config_dict[key] = v 
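# --- Aside: a condensed, standalone sketch of the three-level lookup implemented
# by get_config_item_or_set_default above: environment variable first, then the
# merged config/preset dict, then the hard-coded default, all gated by a
# validator. Names here are hypothetical, not the Fooocus API.
import os
import json

example_config = {}

def get_or_default(key, default, validator):
    value = os.getenv(key)              # the environment wins over the config file
    if value is None:
        value = example_config.get(key)
    if value is not None and validator(value):
        return value
    if value is not None:
        print(f'invalid config {json.dumps({key: value})}; using {default!r}')
    example_config[key] = default       # remember the fallback
    return default

cfg_scale = get_or_default('default_cfg_scale', 7.0,
                           lambda x: isinstance(x, (int, float)))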
+ + if key not in config_dict: + config_dict[key] = default_value + return default_value + + v = config_dict.get(key, None) + if not disable_empty_as_none: + if v is None or v == '': + v = 'None' + if validator(v): + return v + else: + if v is not None: + print(f'Failed to load config key: {json.dumps({key:v})} is invalid; will use {json.dumps({key:default_value})} instead.') + config_dict[key] = default_value + return default_value + + +default_base_model_name = get_config_item_or_set_default( + key='default_model', + default_value='model.safetensors', + validator=lambda x: isinstance(x, str) +) +previous_default_models = get_config_item_or_set_default( + key='previous_default_models', + default_value=[], + validator=lambda x: isinstance(x, list) and all(isinstance(k, str) for k in x) +) +default_refiner_model_name = get_config_item_or_set_default( + key='default_refiner', + default_value='None', + validator=lambda x: isinstance(x, str) +) +default_refiner_switch = get_config_item_or_set_default( + key='default_refiner_switch', + default_value=0.8, + validator=lambda x: isinstance(x, numbers.Number) and 0 <= x <= 1 +) +default_loras_min_weight = get_config_item_or_set_default( + key='default_loras_min_weight', + default_value=-2, + validator=lambda x: isinstance(x, numbers.Number) and -10 <= x <= 10 +) +default_loras_max_weight = get_config_item_or_set_default( + key='default_loras_max_weight', + default_value=2, + validator=lambda x: isinstance(x, numbers.Number) and -10 <= x <= 10 +) +default_loras = get_config_item_or_set_default( + key='default_loras', + default_value=[ + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ] + ], + validator=lambda x: isinstance(x, list) and all(len(y) == 2 and isinstance(y[0], str) and isinstance(y[1], numbers.Number) for y in x) +) +default_max_lora_number = get_config_item_or_set_default( + key='default_max_lora_number', + default_value=len(default_loras) if isinstance(default_loras, list) and len(default_loras) > 0 else 5, + validator=lambda x: isinstance(x, int) and x >= 1 +) +default_cfg_scale = get_config_item_or_set_default( + key='default_cfg_scale', + default_value=7.0, + validator=lambda x: isinstance(x, numbers.Number) +) +default_sample_sharpness = get_config_item_or_set_default( + key='default_sample_sharpness', + default_value=2.0, + validator=lambda x: isinstance(x, numbers.Number) +) +default_sampler = get_config_item_or_set_default( + key='default_sampler', + default_value='dpmpp_2m_sde_gpu', + validator=lambda x: x in modules.flags.sampler_list +) +default_scheduler = get_config_item_or_set_default( + key='default_scheduler', + default_value='karras', + validator=lambda x: x in modules.flags.scheduler_list +) +default_styles = get_config_item_or_set_default( + key='default_styles', + default_value=[ + "Fooocus V2", + "Fooocus Enhance", + "Fooocus Sharp" + ], + validator=lambda x: isinstance(x, list) and all(y in modules.sdxl_styles.legal_style_names for y in x) +) +default_prompt_negative = get_config_item_or_set_default( + key='default_prompt_negative', + default_value='', + validator=lambda x: isinstance(x, str), + disable_empty_as_none=True +) +default_prompt = get_config_item_or_set_default( + key='default_prompt', + default_value='', + validator=lambda x: isinstance(x, str), + disable_empty_as_none=True +) +default_performance = get_config_item_or_set_default( + key='default_performance', + default_value=Performance.SPEED.value, + validator=lambda x: x in 
Performance.list() +) +default_advanced_checkbox = get_config_item_or_set_default( + key='default_advanced_checkbox', + default_value=False, + validator=lambda x: isinstance(x, bool) +) +default_max_image_number = get_config_item_or_set_default( + key='default_max_image_number', + default_value=32, + validator=lambda x: isinstance(x, int) and x >= 1 +) +default_output_format = get_config_item_or_set_default( + key='default_output_format', + default_value='png', + validator=lambda x: x in modules.flags.output_formats +) +default_image_number = get_config_item_or_set_default( + key='default_image_number', + default_value=2, + validator=lambda x: isinstance(x, int) and 1 <= x <= default_max_image_number +) +checkpoint_downloads = get_config_item_or_set_default( + key='checkpoint_downloads', + default_value={}, + validator=lambda x: isinstance(x, dict) and all(isinstance(k, str) and isinstance(v, str) for k, v in x.items()) +) +lora_downloads = get_config_item_or_set_default( + key='lora_downloads', + default_value={}, + validator=lambda x: isinstance(x, dict) and all(isinstance(k, str) and isinstance(v, str) for k, v in x.items()) +) +embeddings_downloads = get_config_item_or_set_default( + key='embeddings_downloads', + default_value={}, + validator=lambda x: isinstance(x, dict) and all(isinstance(k, str) and isinstance(v, str) for k, v in x.items()) +) +available_aspect_ratios = get_config_item_or_set_default( + key='available_aspect_ratios', + default_value=[ + '704*1408', '704*1344', '768*1344', '768*1280', '832*1216', '832*1152', + '896*1152', '896*1088', '960*1088', '960*1024', '1024*1024', '1024*960', + '1088*960', '1088*896', '1152*896', '1152*832', '1216*832', '1280*768', + '1344*768', '1344*704', '1408*704', '1472*704', '1536*640', '1600*640', + '1664*576', '1728*576' + ], + validator=lambda x: isinstance(x, list) and all('*' in v for v in x) and len(x) > 1 +) +default_aspect_ratio = get_config_item_or_set_default( + key='default_aspect_ratio', + default_value='1152*896' if '1152*896' in available_aspect_ratios else available_aspect_ratios[0], + validator=lambda x: x in available_aspect_ratios +) +default_inpaint_engine_version = get_config_item_or_set_default( + key='default_inpaint_engine_version', + default_value='v2.6', + validator=lambda x: x in modules.flags.inpaint_engine_versions +) +default_cfg_tsnr = get_config_item_or_set_default( + key='default_cfg_tsnr', + default_value=7.0, + validator=lambda x: isinstance(x, numbers.Number) +) +default_overwrite_step = get_config_item_or_set_default( + key='default_overwrite_step', + default_value=-1, + validator=lambda x: isinstance(x, int) +) +default_overwrite_switch = get_config_item_or_set_default( + key='default_overwrite_switch', + default_value=-1, + validator=lambda x: isinstance(x, int) +) +example_inpaint_prompts = get_config_item_or_set_default( + key='example_inpaint_prompts', + default_value=[ + 'highly detailed face', 'detailed girl face', 'detailed man face', 'detailed hand', 'beautiful eyes' + ], + validator=lambda x: isinstance(x, list) and all(isinstance(v, str) for v in x) +) +default_save_metadata_to_images = get_config_item_or_set_default( + key='default_save_metadata_to_images', + default_value=True, + validator=lambda x: isinstance(x, bool) +) +default_metadata_scheme = get_config_item_or_set_default( + key='default_metadata_scheme', + default_value=MetadataScheme.FOOOCUS.value, + validator=lambda x: x in [y[1] for y in modules.flags.metadata_scheme if y[1] == x] +) +metadata_created_by = 
get_config_item_or_set_default( + key='metadata_created_by', + default_value='', + validator=lambda x: isinstance(x, str) +) + +example_inpaint_prompts = [[x] for x in example_inpaint_prompts] + +config_dict["default_loras"] = default_loras = default_loras[:default_max_lora_number] + [['None', 1.0] for _ in range(default_max_lora_number - len(default_loras))] + +possible_preset_keys = [ + "default_model", + "default_refiner", + "default_refiner_switch", + "default_loras_min_weight", + "default_loras_max_weight", + "default_loras", + "default_max_lora_number", + "default_cfg_scale", + "default_sample_sharpness", + "default_sampler", + "default_scheduler", + "default_performance", + "default_prompt", + "default_prompt_negative", + "default_styles", + "default_aspect_ratio", + "default_save_metadata_to_images", + "checkpoint_downloads", + "embeddings_downloads", + "lora_downloads", +] + + +REWRITE_PRESET = False + +if REWRITE_PRESET and isinstance(args_manager.args.preset, str): + save_path = 'presets/' + args_manager.args.preset + '.json' + with open(save_path, "w", encoding="utf-8") as json_file: + json.dump({k: config_dict[k] for k in possible_preset_keys}, json_file, indent=4) + print(f'Preset saved to {save_path}. Exiting ...') + exit(0) + + +def add_ratio(x): + a, b = x.replace('*', ' ').split(' ')[:2] + a, b = int(a), int(b) + g = math.gcd(a, b) + return f'{a}×{b} \U00002223 {a // g}:{b // g}' + + +default_aspect_ratio = add_ratio(default_aspect_ratio) +available_aspect_ratios = [add_ratio(x) for x in available_aspect_ratios] + + +# Only write config in the first launch. +if not os.path.exists(config_path): + with open(config_path, "w", encoding="utf-8") as json_file: + json.dump({k: config_dict[k] for k in always_save_keys}, json_file, indent=4) + + +# Always write tutorials. +with open(config_example_path, "w", encoding="utf-8") as json_file: + cpa = config_path.replace("\\", "\\\\") + json_file.write(f'You can modify your "{cpa}" using the below keys, formats, and examples.\n' + f'Do not modify this file. Modifications in this file will not take effect.\n' + f'This file is a tutorial and example. Please edit "{cpa}" to really change any settings.\n' + + 'Remember to split the paths with "\\\\" rather than "\\", ' + 'and there is no "," before the last "}". 
\n\n\n') + json.dump({k: config_dict[k] for k in visited_keys}, json_file, indent=4) + +model_filenames = [] +lora_filenames = [] +sdxl_lcm_lora = 'sdxl_lcm_lora.safetensors' + + +def get_model_filenames(folder_paths, name_filter=None): + extensions = ['.pth', '.ckpt', '.bin', '.safetensors', '.fooocus.patch'] + files = [] + for folder in folder_paths: + files += get_files_from_folder(folder, extensions, name_filter) + return files + + +def update_all_model_names(): + global model_filenames, lora_filenames + model_filenames = get_model_filenames(paths_checkpoints) + lora_filenames = get_model_filenames(paths_loras) + return + + +def downloading_inpaint_models(v): + assert v in modules.flags.inpaint_engine_versions + + load_file_from_url( + url='https://huggingface.co/lllyasviel/fooocus_inpaint/resolve/main/fooocus_inpaint_head.pth', + model_dir=path_inpaint, + file_name='fooocus_inpaint_head.pth' + ) + head_file = os.path.join(path_inpaint, 'fooocus_inpaint_head.pth') + patch_file = None + + if v == 'v1': + load_file_from_url( + url='https://huggingface.co/lllyasviel/fooocus_inpaint/resolve/main/inpaint.fooocus.patch', + model_dir=path_inpaint, + file_name='inpaint.fooocus.patch' + ) + patch_file = os.path.join(path_inpaint, 'inpaint.fooocus.patch') + + if v == 'v2.5': + load_file_from_url( + url='https://huggingface.co/lllyasviel/fooocus_inpaint/resolve/main/inpaint_v25.fooocus.patch', + model_dir=path_inpaint, + file_name='inpaint_v25.fooocus.patch' + ) + patch_file = os.path.join(path_inpaint, 'inpaint_v25.fooocus.patch') + + if v == 'v2.6': + load_file_from_url( + url='https://huggingface.co/lllyasviel/fooocus_inpaint/resolve/main/inpaint_v26.fooocus.patch', + model_dir=path_inpaint, + file_name='inpaint_v26.fooocus.patch' + ) + patch_file = os.path.join(path_inpaint, 'inpaint_v26.fooocus.patch') + + return head_file, patch_file + + +def downloading_sdxl_lcm_lora(): + load_file_from_url( + url='https://huggingface.co/lllyasviel/misc/resolve/main/sdxl_lcm_lora.safetensors', + model_dir=paths_loras[0], + file_name=sdxl_lcm_lora + ) + return sdxl_lcm_lora + + +def downloading_controlnet_canny(): + load_file_from_url( + url='https://huggingface.co/lllyasviel/misc/resolve/main/control-lora-canny-rank128.safetensors', + model_dir=path_controlnet, + file_name='control-lora-canny-rank128.safetensors' + ) + return os.path.join(path_controlnet, 'control-lora-canny-rank128.safetensors') + + +def downloading_controlnet_cpds(): + load_file_from_url( + url='https://huggingface.co/lllyasviel/misc/resolve/main/fooocus_xl_cpds_128.safetensors', + model_dir=path_controlnet, + file_name='fooocus_xl_cpds_128.safetensors' + ) + return os.path.join(path_controlnet, 'fooocus_xl_cpds_128.safetensors') + + +def downloading_ip_adapters(v): + assert v in ['ip', 'face'] + + results = [] + + load_file_from_url( + url='https://huggingface.co/lllyasviel/misc/resolve/main/clip_vision_vit_h.safetensors', + model_dir=path_clip_vision, + file_name='clip_vision_vit_h.safetensors' + ) + results += [os.path.join(path_clip_vision, 'clip_vision_vit_h.safetensors')] + + load_file_from_url( + url='https://huggingface.co/lllyasviel/misc/resolve/main/fooocus_ip_negative.safetensors', + model_dir=path_controlnet, + file_name='fooocus_ip_negative.safetensors' + ) + results += [os.path.join(path_controlnet, 'fooocus_ip_negative.safetensors')] + + if v == 'ip': + load_file_from_url( + url='https://huggingface.co/lllyasviel/misc/resolve/main/ip-adapter-plus_sdxl_vit-h.bin', + model_dir=path_controlnet, + 
file_name='ip-adapter-plus_sdxl_vit-h.bin' + ) + results += [os.path.join(path_controlnet, 'ip-adapter-plus_sdxl_vit-h.bin')] + + if v == 'face': + load_file_from_url( + url='https://huggingface.co/lllyasviel/misc/resolve/main/ip-adapter-plus-face_sdxl_vit-h.bin', + model_dir=path_controlnet, + file_name='ip-adapter-plus-face_sdxl_vit-h.bin' + ) + results += [os.path.join(path_controlnet, 'ip-adapter-plus-face_sdxl_vit-h.bin')] + + return results + + +def downloading_upscale_model(): + load_file_from_url( + url='https://huggingface.co/lllyasviel/misc/resolve/main/fooocus_upscaler_s409985e5.bin', + model_dir=path_upscale_models, + file_name='fooocus_upscaler_s409985e5.bin' + ) + return os.path.join(path_upscale_models, 'fooocus_upscaler_s409985e5.bin') + + +update_all_model_names() diff --git a/modules/constants.py b/modules/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..667fa8682306e192465f11733fc9814bacedfe89 --- /dev/null +++ b/modules/constants.py @@ -0,0 +1,5 @@ +# as in k-diffusion (sampling.py) +MIN_SEED = 0 +MAX_SEED = 2**63 - 1 + +AUTH_FILENAME = 'auth.json' diff --git a/modules/core.py b/modules/core.py new file mode 100644 index 0000000000000000000000000000000000000000..bfc449661d9c636e096b8e9555daa3bebb5f50e7 --- /dev/null +++ b/modules/core.py @@ -0,0 +1,339 @@ +import os +import einops +import torch +import numpy as np + +import ldm_patched.modules.model_management +import ldm_patched.modules.model_detection +import ldm_patched.modules.model_patcher +import ldm_patched.modules.utils +import ldm_patched.modules.controlnet +import modules.sample_hijack +import ldm_patched.modules.samplers +import ldm_patched.modules.latent_formats + +from ldm_patched.modules.sd import load_checkpoint_guess_config +from ldm_patched.contrib.external import VAEDecode, EmptyLatentImage, VAEEncode, VAEEncodeTiled, VAEDecodeTiled, \ + ControlNetApplyAdvanced +from ldm_patched.contrib.external_freelunch import FreeU_V2 +from ldm_patched.modules.sample import prepare_mask +from modules.lora import match_lora +from modules.util import get_file_from_folder_list +from ldm_patched.modules.lora import model_lora_keys_unet, model_lora_keys_clip +from modules.config import path_embeddings +from ldm_patched.contrib.external_model_advanced import ModelSamplingDiscrete + + +opEmptyLatentImage = EmptyLatentImage() +opVAEDecode = VAEDecode() +opVAEEncode = VAEEncode() +opVAEDecodeTiled = VAEDecodeTiled() +opVAEEncodeTiled = VAEEncodeTiled() +opControlNetApplyAdvanced = ControlNetApplyAdvanced() +opFreeU = FreeU_V2() +opModelSamplingDiscrete = ModelSamplingDiscrete() + + +class StableDiffusionModel: + def __init__(self, unet=None, vae=None, clip=None, clip_vision=None, filename=None): + self.unet = unet + self.vae = vae + self.clip = clip + self.clip_vision = clip_vision + self.filename = filename + self.unet_with_lora = unet + self.clip_with_lora = clip + self.visited_loras = '' + + self.lora_key_map_unet = {} + self.lora_key_map_clip = {} + + if self.unet is not None: + self.lora_key_map_unet = model_lora_keys_unet(self.unet.model, self.lora_key_map_unet) + self.lora_key_map_unet.update({x: x for x in self.unet.model.state_dict().keys()}) + + if self.clip is not None: + self.lora_key_map_clip = model_lora_keys_clip(self.clip.cond_stage_model, self.lora_key_map_clip) + self.lora_key_map_clip.update({x: x for x in self.clip.cond_stage_model.state_dict().keys()}) + + @torch.no_grad() + @torch.inference_mode() + def refresh_loras(self, loras): + assert isinstance(loras, list) + + if 
self.visited_loras == str(loras): + return + + self.visited_loras = str(loras) + + if self.unet is None: + return + + print(f'Request to load LoRAs {str(loras)} for model [{self.filename}].') + + loras_to_load = [] + + for name, weight in loras: + if name == 'None': + continue + + if os.path.exists(name): + lora_filename = name + else: + lora_filename = get_file_from_folder_list(name, modules.config.paths_loras) + + if not os.path.exists(lora_filename): + print(f'Lora file not found: {lora_filename}') + continue + + loras_to_load.append((lora_filename, weight)) + + self.unet_with_lora = self.unet.clone() if self.unet is not None else None + self.clip_with_lora = self.clip.clone() if self.clip is not None else None + + for lora_filename, weight in loras_to_load: + lora_unmatch = ldm_patched.modules.utils.load_torch_file(lora_filename, safe_load=False) + lora_unet, lora_unmatch = match_lora(lora_unmatch, self.lora_key_map_unet) + lora_clip, lora_unmatch = match_lora(lora_unmatch, self.lora_key_map_clip) + + if len(lora_unmatch) > 12: + # model mismatch + continue + + if len(lora_unmatch) > 0: + print(f'Loaded LoRA [{lora_filename}] for model [{self.filename}] ' + f'with unmatched keys {list(lora_unmatch.keys())}') + + if self.unet_with_lora is not None and len(lora_unet) > 0: + loaded_keys = self.unet_with_lora.add_patches(lora_unet, weight) + print(f'Loaded LoRA [{lora_filename}] for UNet [{self.filename}] ' + f'with {len(loaded_keys)} keys at weight {weight}.') + for item in lora_unet: + if item not in loaded_keys: + print("UNet LoRA key skipped: ", item) + + if self.clip_with_lora is not None and len(lora_clip) > 0: + loaded_keys = self.clip_with_lora.add_patches(lora_clip, weight) + print(f'Loaded LoRA [{lora_filename}] for CLIP [{self.filename}] ' + f'with {len(loaded_keys)} keys at weight {weight}.') + for item in lora_clip: + if item not in loaded_keys: + print("CLIP LoRA key skipped: ", item) + + +@torch.no_grad() +@torch.inference_mode() +def apply_freeu(model, b1, b2, s1, s2): + return opFreeU.patch(model=model, b1=b1, b2=b2, s1=s1, s2=s2)[0] + + +@torch.no_grad() +@torch.inference_mode() +def load_controlnet(ckpt_filename): + return ldm_patched.modules.controlnet.load_controlnet(ckpt_filename) + + +@torch.no_grad() +@torch.inference_mode() +def apply_controlnet(positive, negative, control_net, image, strength, start_percent, end_percent): + return opControlNetApplyAdvanced.apply_controlnet(positive=positive, negative=negative, control_net=control_net, + image=image, strength=strength, start_percent=start_percent, end_percent=end_percent) + + +@torch.no_grad() +@torch.inference_mode() +def load_model(ckpt_filename): + unet, clip, vae, clip_vision = load_checkpoint_guess_config(ckpt_filename, embedding_directory=path_embeddings) + return StableDiffusionModel(unet=unet, clip=clip, vae=vae, clip_vision=clip_vision, filename=ckpt_filename) + + +@torch.no_grad() +@torch.inference_mode() +def generate_empty_latent(width=1024, height=1024, batch_size=1): + return opEmptyLatentImage.generate(width=width, height=height, batch_size=batch_size)[0] + + +@torch.no_grad() +@torch.inference_mode() +def decode_vae(vae, latent_image, tiled=False): + if tiled: + return opVAEDecodeTiled.decode(samples=latent_image, vae=vae, tile_size=512)[0] + else: + return opVAEDecode.decode(samples=latent_image, vae=vae)[0] + + +@torch.no_grad() +@torch.inference_mode() +def encode_vae(vae, pixels, tiled=False): + if tiled: + return opVAEEncodeTiled.encode(pixels=pixels, vae=vae, tile_size=512)[0] + else: + return 
opVAEEncode.encode(pixels=pixels, vae=vae)[0] + + +@torch.no_grad() +@torch.inference_mode() +def encode_vae_inpaint(vae, pixels, mask): + assert mask.ndim == 3 and pixels.ndim == 4 + assert mask.shape[-1] == pixels.shape[-2] + assert mask.shape[-2] == pixels.shape[-3] + + w = mask.round()[..., None] + pixels = pixels * (1 - w) + 0.5 * w + + latent = vae.encode(pixels) + B, C, H, W = latent.shape + + latent_mask = mask[:, None, :, :] + latent_mask = torch.nn.functional.interpolate(latent_mask, size=(H * 8, W * 8), mode="bilinear").round() + latent_mask = torch.nn.functional.max_pool2d(latent_mask, (8, 8)).round().to(latent) + + return latent, latent_mask + + +class VAEApprox(torch.nn.Module): + def __init__(self): + super(VAEApprox, self).__init__() + self.conv1 = torch.nn.Conv2d(4, 8, (7, 7)) + self.conv2 = torch.nn.Conv2d(8, 16, (5, 5)) + self.conv3 = torch.nn.Conv2d(16, 32, (3, 3)) + self.conv4 = torch.nn.Conv2d(32, 64, (3, 3)) + self.conv5 = torch.nn.Conv2d(64, 32, (3, 3)) + self.conv6 = torch.nn.Conv2d(32, 16, (3, 3)) + self.conv7 = torch.nn.Conv2d(16, 8, (3, 3)) + self.conv8 = torch.nn.Conv2d(8, 3, (3, 3)) + self.current_type = None + + def forward(self, x): + extra = 11 + x = torch.nn.functional.interpolate(x, (x.shape[2] * 2, x.shape[3] * 2)) + x = torch.nn.functional.pad(x, (extra, extra, extra, extra)) + for layer in [self.conv1, self.conv2, self.conv3, self.conv4, self.conv5, self.conv6, self.conv7, self.conv8]: + x = layer(x) + x = torch.nn.functional.leaky_relu(x, 0.1) + return x + + +VAE_approx_models = {} + + +@torch.no_grad() +@torch.inference_mode() +def get_previewer(model): + global VAE_approx_models + + from modules.config import path_vae_approx + is_sdxl = isinstance(model.model.latent_format, ldm_patched.modules.latent_formats.SDXL) + vae_approx_filename = os.path.join(path_vae_approx, 'xlvaeapp.pth' if is_sdxl else 'vaeapp_sd15.pth') + + if vae_approx_filename in VAE_approx_models: + VAE_approx_model = VAE_approx_models[vae_approx_filename] + else: + sd = torch.load(vae_approx_filename, map_location='cpu') + VAE_approx_model = VAEApprox() + VAE_approx_model.load_state_dict(sd) + del sd + VAE_approx_model.eval() + + if ldm_patched.modules.model_management.should_use_fp16(): + VAE_approx_model.half() + VAE_approx_model.current_type = torch.float16 + else: + VAE_approx_model.float() + VAE_approx_model.current_type = torch.float32 + + VAE_approx_model.to(ldm_patched.modules.model_management.get_torch_device()) + VAE_approx_models[vae_approx_filename] = VAE_approx_model + + @torch.no_grad() + @torch.inference_mode() + def preview_function(x0, step, total_steps): + with torch.no_grad(): + x_sample = x0.to(VAE_approx_model.current_type) + x_sample = VAE_approx_model(x_sample) * 127.5 + 127.5 + x_sample = einops.rearrange(x_sample, 'b c h w -> b h w c')[0] + x_sample = x_sample.cpu().numpy().clip(0, 255).astype(np.uint8) + return x_sample + + return preview_function + + +@torch.no_grad() +@torch.inference_mode() +def ksampler(model, positive, negative, latent, seed=None, steps=30, cfg=7.0, sampler_name='dpmpp_2m_sde_gpu', + scheduler='karras', denoise=1.0, disable_noise=False, start_step=None, last_step=None, + force_full_denoise=False, callback_function=None, refiner=None, refiner_switch=-1, + previewer_start=None, previewer_end=None, sigmas=None, noise_mean=None, disable_preview=False): + + if sigmas is not None: + sigmas = sigmas.clone().to(ldm_patched.modules.model_management.get_torch_device()) + + latent_image = latent["samples"] + + if disable_noise: + noise = 
torch.zeros(latent_image.size(), dtype=latent_image.dtype, layout=latent_image.layout, device="cpu") + else: + batch_inds = latent["batch_index"] if "batch_index" in latent else None + noise = ldm_patched.modules.sample.prepare_noise(latent_image, seed, batch_inds) + + if isinstance(noise_mean, torch.Tensor): + noise = noise + noise_mean - torch.mean(noise, dim=1, keepdim=True) + + noise_mask = None + if "noise_mask" in latent: + noise_mask = latent["noise_mask"] + + previewer = get_previewer(model) + + if previewer_start is None: + previewer_start = 0 + + if previewer_end is None: + previewer_end = steps + + def callback(step, x0, x, total_steps): + ldm_patched.modules.model_management.throw_exception_if_processing_interrupted() + y = None + if previewer is not None and not disable_preview: + y = previewer(x0, previewer_start + step, previewer_end) + if callback_function is not None: + callback_function(previewer_start + step, x0, x, previewer_end, y) + + disable_pbar = False + modules.sample_hijack.current_refiner = refiner + modules.sample_hijack.refiner_switch_step = refiner_switch + ldm_patched.modules.samplers.sample = modules.sample_hijack.sample_hacked + + try: + samples = ldm_patched.modules.sample.sample(model, + noise, steps, cfg, sampler_name, scheduler, + positive, negative, latent_image, + denoise=denoise, disable_noise=disable_noise, + start_step=start_step, + last_step=last_step, + force_full_denoise=force_full_denoise, noise_mask=noise_mask, + callback=callback, + disable_pbar=disable_pbar, seed=seed, sigmas=sigmas) + + out = latent.copy() + out["samples"] = samples + finally: + modules.sample_hijack.current_refiner = None + + return out + + +@torch.no_grad() +@torch.inference_mode() +def pytorch_to_numpy(x): + return [np.clip(255. 
* y.cpu().numpy(), 0, 255).astype(np.uint8) for y in x] + + +@torch.no_grad() +@torch.inference_mode() +def numpy_to_pytorch(x): + y = x.astype(np.float32) / 255.0 + y = y[None] + y = np.ascontiguousarray(y.copy()) + y = torch.from_numpy(y).float() + return y diff --git a/modules/default_pipeline.py b/modules/default_pipeline.py new file mode 100644 index 0000000000000000000000000000000000000000..f8edfae105fa45a103f9e1463e6abbee2c19444c --- /dev/null +++ b/modules/default_pipeline.py @@ -0,0 +1,498 @@ +import modules.core as core +import os +import torch +import modules.patch +import modules.config +import ldm_patched.modules.model_management +import ldm_patched.modules.latent_formats +import modules.inpaint_worker +import extras.vae_interpose as vae_interpose +from extras.expansion import FooocusExpansion + +from ldm_patched.modules.model_base import SDXL, SDXLRefiner +from modules.sample_hijack import clip_separate +from modules.util import get_file_from_folder_list + + +model_base = core.StableDiffusionModel() +model_refiner = core.StableDiffusionModel() + +final_expansion = None +final_unet = None +final_clip = None +final_vae = None +final_refiner_unet = None +final_refiner_vae = None + +loaded_ControlNets = {} + + +@torch.no_grad() +@torch.inference_mode() +def refresh_controlnets(model_paths): + global loaded_ControlNets + cache = {} + for p in model_paths: + if p is not None: + if p in loaded_ControlNets: + cache[p] = loaded_ControlNets[p] + else: + cache[p] = core.load_controlnet(p) + loaded_ControlNets = cache + return + + +@torch.no_grad() +@torch.inference_mode() +def assert_model_integrity(): + error_message = None + + if not isinstance(model_base.unet_with_lora.model, SDXL): + error_message = 'You have selected base model other than SDXL. This is not supported yet.' 
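+        # Fooocus pipelines assume the SDXL architecture; any non-SDXL base model is rejected before sampling starts.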
+ + if error_message is not None: + raise NotImplementedError(error_message) + + return True + + +@torch.no_grad() +@torch.inference_mode() +def refresh_base_model(name): + global model_base + + filename = get_file_from_folder_list(name, modules.config.paths_checkpoints) + + if model_base.filename == filename: + return + + model_base = core.StableDiffusionModel() + model_base = core.load_model(filename) + print(f'Base model loaded: {model_base.filename}') + return + + +@torch.no_grad() +@torch.inference_mode() +def refresh_refiner_model(name): + global model_refiner + + filename = get_file_from_folder_list(name, modules.config.paths_checkpoints) + + if model_refiner.filename == filename: + return + + model_refiner = core.StableDiffusionModel() + + if name == 'None': + print(f'Refiner unloaded.') + return + + model_refiner = core.load_model(filename) + print(f'Refiner model loaded: {model_refiner.filename}') + + if isinstance(model_refiner.unet.model, SDXL): + model_refiner.clip = None + model_refiner.vae = None + elif isinstance(model_refiner.unet.model, SDXLRefiner): + model_refiner.clip = None + model_refiner.vae = None + else: + model_refiner.clip = None + + return + + +@torch.no_grad() +@torch.inference_mode() +def synthesize_refiner_model(): + global model_base, model_refiner + + print('Synthetic Refiner Activated') + model_refiner = core.StableDiffusionModel( + unet=model_base.unet, + vae=model_base.vae, + clip=model_base.clip, + clip_vision=model_base.clip_vision, + filename=model_base.filename + ) + model_refiner.vae = None + model_refiner.clip = None + model_refiner.clip_vision = None + + return + + +@torch.no_grad() +@torch.inference_mode() +def refresh_loras(loras, base_model_additional_loras=None): + global model_base, model_refiner + + if not isinstance(base_model_additional_loras, list): + base_model_additional_loras = [] + + model_base.refresh_loras(loras + base_model_additional_loras) + model_refiner.refresh_loras(loras) + + return + + +@torch.no_grad() +@torch.inference_mode() +def clip_encode_single(clip, text, verbose=False): + cached = clip.fcs_cond_cache.get(text, None) + if cached is not None: + if verbose: + print(f'[CLIP Cached] {text}') + return cached + tokens = clip.tokenize(text) + result = clip.encode_from_tokens(tokens, return_pooled=True) + clip.fcs_cond_cache[text] = result + if verbose: + print(f'[CLIP Encoded] {text}') + return result + + +@torch.no_grad() +@torch.inference_mode() +def clone_cond(conds): + results = [] + + for c, p in conds: + p = p["pooled_output"] + + if isinstance(c, torch.Tensor): + c = c.clone() + + if isinstance(p, torch.Tensor): + p = p.clone() + + results.append([c, {"pooled_output": p}]) + + return results + + +@torch.no_grad() +@torch.inference_mode() +def clip_encode(texts, pool_top_k=1): + global final_clip + + if final_clip is None: + return None + if not isinstance(texts, list): + return None + if len(texts) == 0: + return None + + cond_list = [] + pooled_acc = 0 + + for i, text in enumerate(texts): + cond, pooled = clip_encode_single(final_clip, text) + cond_list.append(cond) + if i < pool_top_k: + pooled_acc += pooled + + return [[torch.cat(cond_list, dim=1), {"pooled_output": pooled_acc}]] + + +@torch.no_grad() +@torch.inference_mode() +def clear_all_caches(): + final_clip.fcs_cond_cache = {} + + +@torch.no_grad() +@torch.inference_mode() +def prepare_text_encoder(async_call=True): + if async_call: + # TODO: make sure that this is always called in an async way so that users cannot feel it. 
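+    # Pre-loading the CLIP text encoder and the prompt-expansion model here keeps
+    # the first generation from stalling on model loading.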
+ pass + assert_model_integrity() + ldm_patched.modules.model_management.load_models_gpu([final_clip.patcher, final_expansion.patcher]) + return + + +@torch.no_grad() +@torch.inference_mode() +def refresh_everything(refiner_model_name, base_model_name, loras, + base_model_additional_loras=None, use_synthetic_refiner=False): + global final_unet, final_clip, final_vae, final_refiner_unet, final_refiner_vae, final_expansion + + final_unet = None + final_clip = None + final_vae = None + final_refiner_unet = None + final_refiner_vae = None + + if use_synthetic_refiner and refiner_model_name == 'None': + print('Synthetic Refiner Activated') + refresh_base_model(base_model_name) + synthesize_refiner_model() + else: + refresh_refiner_model(refiner_model_name) + refresh_base_model(base_model_name) + + refresh_loras(loras, base_model_additional_loras=base_model_additional_loras) + assert_model_integrity() + + final_unet = model_base.unet_with_lora + final_clip = model_base.clip_with_lora + final_vae = model_base.vae + + final_refiner_unet = model_refiner.unet_with_lora + final_refiner_vae = model_refiner.vae + + if final_expansion is None: + final_expansion = FooocusExpansion() + + prepare_text_encoder(async_call=True) + clear_all_caches() + return + + +refresh_everything( + refiner_model_name=modules.config.default_refiner_model_name, + base_model_name=modules.config.default_base_model_name, + loras=modules.config.default_loras +) + + +@torch.no_grad() +@torch.inference_mode() +def vae_parse(latent): + if final_refiner_vae is None: + return latent + + result = vae_interpose.parse(latent["samples"]) + return {'samples': result} + + +@torch.no_grad() +@torch.inference_mode() +def calculate_sigmas_all(sampler, model, scheduler, steps): + from ldm_patched.modules.samplers import calculate_sigmas_scheduler + + discard_penultimate_sigma = False + if sampler in ['dpm_2', 'dpm_2_ancestral']: + steps += 1 + discard_penultimate_sigma = True + + sigmas = calculate_sigmas_scheduler(model, scheduler, steps) + + if discard_penultimate_sigma: + sigmas = torch.cat([sigmas[:-2], sigmas[-1:]]) + return sigmas + + +@torch.no_grad() +@torch.inference_mode() +def calculate_sigmas(sampler, model, scheduler, steps, denoise): + if denoise is None or denoise > 0.9999: + sigmas = calculate_sigmas_all(sampler, model, scheduler, steps) + else: + new_steps = int(steps / denoise) + sigmas = calculate_sigmas_all(sampler, model, scheduler, new_steps) + sigmas = sigmas[-(steps + 1):] + return sigmas + + +@torch.no_grad() +@torch.inference_mode() +def get_candidate_vae(steps, switch, denoise=1.0, refiner_swap_method='joint'): + assert refiner_swap_method in ['joint', 'separate', 'vae'] + + if final_refiner_vae is not None and final_refiner_unet is not None: + if denoise > 0.9: + return final_vae, final_refiner_vae + else: + if denoise > (float(steps - switch) / float(steps)) ** 0.834: # karras 0.834 + return final_vae, None + else: + return final_refiner_vae, None + + return final_vae, final_refiner_vae + + +@torch.no_grad() +@torch.inference_mode() +def process_diffusion(positive_cond, negative_cond, steps, switch, width, height, image_seed, callback, sampler_name, scheduler_name, latent=None, denoise=1.0, tiled=False, cfg_scale=7.0, refiner_swap_method='joint', disable_preview=False): + target_unet, target_vae, target_refiner_unet, target_refiner_vae, target_clip \ + = final_unet, final_vae, final_refiner_unet, final_refiner_vae, final_clip + + assert refiner_swap_method in ['joint', 'separate', 'vae'] + + if final_refiner_vae is 
not None and final_refiner_unet is not None: + # Refiner Use Different VAE (then it is SD15) + if denoise > 0.9: + refiner_swap_method = 'vae' + else: + refiner_swap_method = 'joint' + if denoise > (float(steps - switch) / float(steps)) ** 0.834: # karras 0.834 + target_unet, target_vae, target_refiner_unet, target_refiner_vae \ + = final_unet, final_vae, None, None + print(f'[Sampler] only use Base because of partial denoise.') + else: + positive_cond = clip_separate(positive_cond, target_model=final_refiner_unet.model, target_clip=final_clip) + negative_cond = clip_separate(negative_cond, target_model=final_refiner_unet.model, target_clip=final_clip) + target_unet, target_vae, target_refiner_unet, target_refiner_vae \ + = final_refiner_unet, final_refiner_vae, None, None + print(f'[Sampler] only use Refiner because of partial denoise.') + + print(f'[Sampler] refiner_swap_method = {refiner_swap_method}') + + if latent is None: + initial_latent = core.generate_empty_latent(width=width, height=height, batch_size=1) + else: + initial_latent = latent + + minmax_sigmas = calculate_sigmas(sampler=sampler_name, scheduler=scheduler_name, model=final_unet.model, steps=steps, denoise=denoise) + sigma_min, sigma_max = minmax_sigmas[minmax_sigmas > 0].min(), minmax_sigmas.max() + sigma_min = float(sigma_min.cpu().numpy()) + sigma_max = float(sigma_max.cpu().numpy()) + print(f'[Sampler] sigma_min = {sigma_min}, sigma_max = {sigma_max}') + + modules.patch.BrownianTreeNoiseSamplerPatched.global_init( + initial_latent['samples'].to(ldm_patched.modules.model_management.get_torch_device()), + sigma_min, sigma_max, seed=image_seed, cpu=False) + + decoded_latent = None + + if refiner_swap_method == 'joint': + sampled_latent = core.ksampler( + model=target_unet, + refiner=target_refiner_unet, + positive=positive_cond, + negative=negative_cond, + latent=initial_latent, + steps=steps, start_step=0, last_step=steps, disable_noise=False, force_full_denoise=True, + seed=image_seed, + denoise=denoise, + callback_function=callback, + cfg=cfg_scale, + sampler_name=sampler_name, + scheduler=scheduler_name, + refiner_switch=switch, + previewer_start=0, + previewer_end=steps, + disable_preview=disable_preview + ) + decoded_latent = core.decode_vae(vae=target_vae, latent_image=sampled_latent, tiled=tiled) + + if refiner_swap_method == 'separate': + sampled_latent = core.ksampler( + model=target_unet, + positive=positive_cond, + negative=negative_cond, + latent=initial_latent, + steps=steps, start_step=0, last_step=switch, disable_noise=False, force_full_denoise=False, + seed=image_seed, + denoise=denoise, + callback_function=callback, + cfg=cfg_scale, + sampler_name=sampler_name, + scheduler=scheduler_name, + previewer_start=0, + previewer_end=steps, + disable_preview=disable_preview + ) + print('Refiner swapped by changing ksampler. 
Noise preserved.') + + target_model = target_refiner_unet + if target_model is None: + target_model = target_unet + print('Use base model to refine itself - this may because of developer mode.') + + sampled_latent = core.ksampler( + model=target_model, + positive=clip_separate(positive_cond, target_model=target_model.model, target_clip=target_clip), + negative=clip_separate(negative_cond, target_model=target_model.model, target_clip=target_clip), + latent=sampled_latent, + steps=steps, start_step=switch, last_step=steps, disable_noise=True, force_full_denoise=True, + seed=image_seed, + denoise=denoise, + callback_function=callback, + cfg=cfg_scale, + sampler_name=sampler_name, + scheduler=scheduler_name, + previewer_start=switch, + previewer_end=steps, + disable_preview=disable_preview + ) + + target_model = target_refiner_vae + if target_model is None: + target_model = target_vae + decoded_latent = core.decode_vae(vae=target_model, latent_image=sampled_latent, tiled=tiled) + + if refiner_swap_method == 'vae': + modules.patch.patch_settings[os.getpid()].eps_record = 'vae' + + if modules.inpaint_worker.current_task is not None: + modules.inpaint_worker.current_task.unswap() + + sampled_latent = core.ksampler( + model=target_unet, + positive=positive_cond, + negative=negative_cond, + latent=initial_latent, + steps=steps, start_step=0, last_step=switch, disable_noise=False, force_full_denoise=True, + seed=image_seed, + denoise=denoise, + callback_function=callback, + cfg=cfg_scale, + sampler_name=sampler_name, + scheduler=scheduler_name, + previewer_start=0, + previewer_end=steps, + disable_preview=disable_preview + ) + print('Fooocus VAE-based swap.') + + target_model = target_refiner_unet + if target_model is None: + target_model = target_unet + print('Use base model to refine itself - this may because of developer mode.') + + sampled_latent = vae_parse(sampled_latent) + + k_sigmas = 1.4 + sigmas = calculate_sigmas(sampler=sampler_name, + scheduler=scheduler_name, + model=target_model.model, + steps=steps, + denoise=denoise)[switch:] * k_sigmas + len_sigmas = len(sigmas) - 1 + + noise_mean = torch.mean(modules.patch.patch_settings[os.getpid()].eps_record, dim=1, keepdim=True) + + if modules.inpaint_worker.current_task is not None: + modules.inpaint_worker.current_task.swap() + + sampled_latent = core.ksampler( + model=target_model, + positive=clip_separate(positive_cond, target_model=target_model.model, target_clip=target_clip), + negative=clip_separate(negative_cond, target_model=target_model.model, target_clip=target_clip), + latent=sampled_latent, + steps=len_sigmas, start_step=0, last_step=len_sigmas, disable_noise=False, force_full_denoise=True, + seed=image_seed+1, + denoise=denoise, + callback_function=callback, + cfg=cfg_scale, + sampler_name=sampler_name, + scheduler=scheduler_name, + previewer_start=switch, + previewer_end=steps, + sigmas=sigmas, + noise_mean=noise_mean, + disable_preview=disable_preview + ) + + target_model = target_refiner_vae + if target_model is None: + target_model = target_vae + decoded_latent = core.decode_vae(vae=target_model, latent_image=sampled_latent, tiled=tiled) + + images = core.pytorch_to_numpy(decoded_latent) + modules.patch.patch_settings[os.getpid()].eps_record = None + return images diff --git a/modules/flags.py b/modules/flags.py new file mode 100644 index 0000000000000000000000000000000000000000..6f12bc8f3f27c4b9ae06f2ee7ac0a90e46122b16 --- /dev/null +++ b/modules/flags.py @@ -0,0 +1,125 @@ +from enum import IntEnum, Enum + +disabled = 
'Disabled'
+enabled = 'Enabled'
+subtle_variation = 'Vary (Subtle)'
+strong_variation = 'Vary (Strong)'
+upscale_15 = 'Upscale (1.5x)'
+upscale_2 = 'Upscale (2x)'
+upscale_fast = 'Upscale (Fast 2x)'
+
+uov_list = [
+    disabled, subtle_variation, strong_variation, upscale_15, upscale_2, upscale_fast
+]
+
+CIVITAI_NO_KARRAS = ["euler", "euler_ancestral", "heun", "dpm_fast", "dpm_adaptive", "ddim", "uni_pc"]
+
+# fooocus: a1111 (Civitai)
+KSAMPLER = {
+    "euler": "Euler",
+    "euler_ancestral": "Euler a",
+    "heun": "Heun",
+    "heunpp2": "",
+    "dpm_2": "DPM2",
+    "dpm_2_ancestral": "DPM2 a",
+    "lms": "LMS",
+    "dpm_fast": "DPM fast",
+    "dpm_adaptive": "DPM adaptive",
+    "dpmpp_2s_ancestral": "DPM++ 2S a",
+    "dpmpp_sde": "DPM++ SDE",
+    "dpmpp_sde_gpu": "DPM++ SDE",
+    "dpmpp_2m": "DPM++ 2M",
+    "dpmpp_2m_sde": "DPM++ 2M SDE",
+    "dpmpp_2m_sde_gpu": "DPM++ 2M SDE",
+    "dpmpp_3m_sde": "",
+    "dpmpp_3m_sde_gpu": "",
+    "ddpm": "",
+    "lcm": "LCM"
+}
+
+SAMPLER_EXTRA = {
+    "ddim": "DDIM",
+    "uni_pc": "UniPC",
+    "uni_pc_bh2": ""
+}
+
+SAMPLERS = KSAMPLER | SAMPLER_EXTRA
+
+KSAMPLER_NAMES = list(KSAMPLER.keys())
+
+SCHEDULER_NAMES = ["normal", "karras", "exponential", "sgm_uniform", "simple", "ddim_uniform", "lcm", "turbo"]
+SAMPLER_NAMES = KSAMPLER_NAMES + list(SAMPLER_EXTRA.keys())
+
+sampler_list = SAMPLER_NAMES
+scheduler_list = SCHEDULER_NAMES
+
+refiner_swap_method = 'joint'
+
+cn_ip = "ImagePrompt"
+cn_ip_face = "FaceSwap"
+cn_canny = "PyraCanny"
+cn_cpds = "CPDS"
+
+ip_list = [cn_ip, cn_canny, cn_cpds, cn_ip_face]
+default_ip = cn_ip
+
+default_parameters = {
+    cn_ip: (0.5, 0.6), cn_ip_face: (0.9, 0.75), cn_canny: (0.5, 1.0), cn_cpds: (0.5, 1.0)
+}  # stop, weight
+
+output_formats = ['png', 'jpg', 'webp']
+
+inpaint_engine_versions = ['None', 'v1', 'v2.5', 'v2.6']
+inpaint_option_default = 'Inpaint or Outpaint (default)'
+inpaint_option_detail = 'Improve Detail (face, hand, eyes, etc.)'
+inpaint_option_modify = 'Modify Content (add objects, change background, etc.)'
+inpaint_options = [inpaint_option_default, inpaint_option_detail, inpaint_option_modify]
+
+desc_type_photo = 'Photograph'
+desc_type_anime = 'Art/Anime'
+
+
+class MetadataScheme(Enum):
+    FOOOCUS = 'fooocus'
+    A1111 = 'a1111'
+
+
+metadata_scheme = [
+    (f'{MetadataScheme.FOOOCUS.value} (json)', MetadataScheme.FOOOCUS.value),
+    (f'{MetadataScheme.A1111.value} (plain text)', MetadataScheme.A1111.value),
+]
+
+lora_count = 5
+
+controlnet_image_count = 4
+
+
+class Steps(IntEnum):
+    QUALITY = 60
+    SPEED = 30
+    EXTREME_SPEED = 8
+
+
+class StepsUOV(IntEnum):
+    QUALITY = 36
+    SPEED = 18
+    EXTREME_SPEED = 8
+
+
+class Performance(Enum):
+    QUALITY = 'Quality'
+    SPEED = 'Speed'
+    EXTREME_SPEED = 'Extreme Speed'
+
+    @classmethod
+    def list(cls) -> list:
+        return list(map(lambda c: c.value, cls))
+
+    def steps(self) -> int | None:
+        return Steps[self.name].value if self.name in Steps.__members__ else None
+
+    def steps_uov(self) -> int | None:
+        # Look up the upscale/variation step count in StepsUOV, not Steps.
+        return StepsUOV[self.name].value if self.name in StepsUOV.__members__ else None
+
+
+performance_selections = Performance.list()
diff --git a/modules/gradio_hijack.py b/modules/gradio_hijack.py
new file mode 100644
index 0000000000000000000000000000000000000000..181429ec39a0336ffa43ebf23e4fa2b87dd97674
--- /dev/null
+++ b/modules/gradio_hijack.py
@@ -0,0 +1,480 @@
+"""gr.Image() component."""
+
+from __future__ import annotations
+
+import warnings
+from pathlib import Path
+from typing import Any, Literal
+
+import numpy as np
+import PIL
+import PIL.ImageOps
+import gradio.routes
+import importlib
+
+from gradio_client import utils as
client_utils +from gradio_client.documentation import document, set_documentation_group +from gradio_client.serializing import ImgSerializable +from PIL import Image as _Image # using _ to minimize namespace pollution + +from gradio import processing_utils, utils +from gradio.components.base import IOComponent, _Keywords, Block +from gradio.deprecation import warn_style_method_deprecation +from gradio.events import ( + Changeable, + Clearable, + Editable, + EventListenerMethod, + Selectable, + Streamable, + Uploadable, +) +from gradio.interpretation import TokenInterpretable + +set_documentation_group("component") +_Image.init() # fixes https://github.com/gradio-app/gradio/issues/2843 + + +@document() +class Image( + Editable, + Clearable, + Changeable, + Streamable, + Selectable, + Uploadable, + IOComponent, + ImgSerializable, + TokenInterpretable, +): + """ + Creates an image component that can be used to upload/draw images (as an input) or display images (as an output). + Preprocessing: passes the uploaded image as a {numpy.array}, {PIL.Image} or {str} filepath depending on `type` -- unless `tool` is `sketch` AND source is one of `upload` or `webcam`. In these cases, a {dict} with keys `image` and `mask` is passed, and the format of the corresponding values depends on `type`. + Postprocessing: expects a {numpy.array}, {PIL.Image} or {str} or {pathlib.Path} filepath to an image and displays the image. + Examples-format: a {str} filepath to a local file that contains the image. + Demos: image_mod, image_mod_default_image + Guides: image-classification-in-pytorch, image-classification-in-tensorflow, image-classification-with-vision-transformers, building-a-pictionary_app, create-your-own-friends-with-a-gan + """ + + def __init__( + self, + value: str | _Image.Image | np.ndarray | None = None, + *, + shape: tuple[int, int] | None = None, + height: int | None = None, + width: int | None = None, + image_mode: Literal[ + "1", "L", "P", "RGB", "RGBA", "CMYK", "YCbCr", "LAB", "HSV", "I", "F" + ] = "RGB", + invert_colors: bool = False, + source: Literal["upload", "webcam", "canvas"] = "upload", + tool: Literal["editor", "select", "sketch", "color-sketch"] | None = None, + type: Literal["numpy", "pil", "filepath"] = "numpy", + label: str | None = None, + every: float | None = None, + show_label: bool | None = None, + show_download_button: bool = True, + container: bool = True, + scale: int | None = None, + min_width: int = 160, + interactive: bool | None = None, + visible: bool = True, + streaming: bool = False, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + mirror_webcam: bool = True, + brush_radius: float | None = None, + brush_color: str = "#000000", + mask_opacity: float = 0.7, + show_share_button: bool | None = None, + **kwargs, + ): + """ + Parameters: + value: A PIL Image, numpy array, path or URL for the default value that Image component is going to take. If callable, the function will be called whenever the app loads to set the initial value of the component. + shape: (width, height) shape to crop and resize image when passed to function. If None, matches input image size. Pass None for either width or height to only crop and resize the other. + height: Height of the displayed image in pixels. + width: Width of the displayed image in pixels. + image_mode: "RGB" if color, or "L" if black and white. See https://pillow.readthedocs.io/en/stable/handbook/concepts.html for other supported image modes and their meaning. 
+ invert_colors: whether to invert the image as a preprocessing step. + source: Source of image. "upload" creates a box where user can drop an image file, "webcam" allows user to take snapshot from their webcam, "canvas" defaults to a white image that can be edited and drawn upon with tools. + tool: Tools used for editing. "editor" allows a full screen editor (and is the default if source is "upload" or "webcam"), "select" provides a cropping and zoom tool, "sketch" allows you to create a binary sketch (and is the default if source="canvas"), and "color-sketch" allows you to created a sketch in different colors. "color-sketch" can be used with source="upload" or "webcam" to allow sketching on an image. "sketch" can also be used with "upload" or "webcam" to create a mask over an image and in that case both the image and mask are passed into the function as a dictionary with keys "image" and "mask" respectively. + type: The format the image is converted to before being passed into the prediction function. "numpy" converts the image to a numpy array with shape (height, width, 3) and values from 0 to 255, "pil" converts the image to a PIL image object, "filepath" passes a str path to a temporary file containing the image. + label: component name in interface. + every: If `value` is a callable, run the function 'every' number of seconds while the client connection is open. Has no effect otherwise. Queue must be enabled. The event can be accessed (e.g. to cancel it) via this component's .load_event attribute. + show_label: if True, will display label. + show_download_button: If True, will display button to download image. + container: If True, will place the component in a container - providing some extra padding around the border. + scale: relative width compared to adjacent Components in a Row. For example, if Component A has scale=2, and Component B has scale=1, A will be twice as wide as B. Should be an integer. + min_width: minimum pixel width, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in this Component being narrower than min_width, the min_width parameter will be respected first. + interactive: if True, will allow users to upload and edit an image; if False, can only be used to display images. If not provided, this is inferred based on whether the component is used as an input or output. + visible: If False, component will be hidden. + streaming: If True when used in a `live` interface, will automatically stream webcam feed. Only valid is source is 'webcam'. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles. + mirror_webcam: If True webcam will be mirrored. Default is True. + brush_radius: Size of the brush for Sketch. Default is None which chooses a sensible default + brush_color: Color of the brush for Sketch as hex string. Default is "#000000". + mask_opacity: Opacity of mask drawn on image, as a value between 0 and 1. + show_share_button: If True, will show a share icon in the corner of the component that allows user to share outputs to Hugging Face Spaces Discussions. If False, icon does not appear. If set to None (default behavior), then the icon appears if this Gradio app is launched on Spaces, but not otherwise. 
+ """ + self.brush_radius = brush_radius + self.brush_color = brush_color + self.mask_opacity = mask_opacity + self.mirror_webcam = mirror_webcam + valid_types = ["numpy", "pil", "filepath"] + if type not in valid_types: + raise ValueError( + f"Invalid value for parameter `type`: {type}. Please choose from one of: {valid_types}" + ) + self.type = type + self.shape = shape + self.height = height + self.width = width + self.image_mode = image_mode + valid_sources = ["upload", "webcam", "canvas"] + if source not in valid_sources: + raise ValueError( + f"Invalid value for parameter `source`: {source}. Please choose from one of: {valid_sources}" + ) + self.source = source + if tool is None: + self.tool = "sketch" if source == "canvas" else "editor" + else: + self.tool = tool + self.invert_colors = invert_colors + self.streaming = streaming + self.show_download_button = show_download_button + if streaming and source != "webcam": + raise ValueError("Image streaming only available if source is 'webcam'.") + self.select: EventListenerMethod + """ + Event listener for when the user clicks on a pixel within the image. + Uses event data gradio.SelectData to carry `index` to refer to the [x, y] coordinates of the clicked pixel. + See EventData documentation on how to use this event data. + """ + self.show_share_button = ( + (utils.get_space() is not None) + if show_share_button is None + else show_share_button + ) + IOComponent.__init__( + self, + label=label, + every=every, + show_label=show_label, + container=container, + scale=scale, + min_width=min_width, + interactive=interactive, + visible=visible, + elem_id=elem_id, + elem_classes=elem_classes, + value=value, + **kwargs, + ) + TokenInterpretable.__init__(self) + + def get_config(self): + return { + "image_mode": self.image_mode, + "shape": self.shape, + "height": self.height, + "width": self.width, + "source": self.source, + "tool": self.tool, + "value": self.value, + "streaming": self.streaming, + "mirror_webcam": self.mirror_webcam, + "brush_radius": self.brush_radius, + "brush_color": self.brush_color, + "mask_opacity": self.mask_opacity, + "selectable": self.selectable, + "show_share_button": self.show_share_button, + "show_download_button": self.show_download_button, + **IOComponent.get_config(self), + } + + @staticmethod + def update( + value: Any | Literal[_Keywords.NO_VALUE] | None = _Keywords.NO_VALUE, + height: int | None = None, + width: int | None = None, + label: str | None = None, + show_label: bool | None = None, + show_download_button: bool | None = None, + container: bool | None = None, + scale: int | None = None, + min_width: int | None = None, + interactive: bool | None = None, + visible: bool | None = None, + brush_radius: float | None = None, + brush_color: str | None = None, + mask_opacity: float | None = None, + show_share_button: bool | None = None, + ): + return { + "height": height, + "width": width, + "label": label, + "show_label": show_label, + "show_download_button": show_download_button, + "container": container, + "scale": scale, + "min_width": min_width, + "interactive": interactive, + "visible": visible, + "value": value, + "brush_radius": brush_radius, + "brush_color": brush_color, + "mask_opacity": mask_opacity, + "show_share_button": show_share_button, + "__type__": "update", + } + + def _format_image( + self, im: _Image.Image | None + ) -> np.ndarray | _Image.Image | str | None: + """Helper method to format an image based on self.type""" + if im is None: + return im + fmt = im.format + if self.type == "pil": 
+ return im + elif self.type == "numpy": + return np.array(im) + elif self.type == "filepath": + path = self.pil_to_temp_file( + im, dir=self.DEFAULT_TEMP_DIR, format=fmt or "png" + ) + self.temp_files.add(path) + return path + else: + raise ValueError( + "Unknown type: " + + str(self.type) + + ". Please choose from: 'numpy', 'pil', 'filepath'." + ) + + def preprocess( + self, x: str | dict[str, str] + ) -> np.ndarray | _Image.Image | str | dict | None: + """ + Parameters: + x: base64 url data, or (if tool == "sketch") a dict of image and mask base64 url data + Returns: + image in requested format, or (if tool == "sketch") a dict of image and mask in requested format + """ + if x is None: + return x + + mask = None + + if self.tool == "sketch" and self.source in ["upload", "webcam"]: + if isinstance(x, dict): + x, mask = x["image"], x["mask"] + + assert isinstance(x, str) + im = processing_utils.decode_base64_to_image(x) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + im = im.convert(self.image_mode) + if self.shape is not None: + im = processing_utils.resize_and_crop(im, self.shape) + if self.invert_colors: + im = PIL.ImageOps.invert(im) + if ( + self.source == "webcam" + and self.mirror_webcam is True + and self.tool != "color-sketch" + ): + im = PIL.ImageOps.mirror(im) + + if self.tool == "sketch" and self.source in ["upload", "webcam"]: + if mask is not None: + mask_im = processing_utils.decode_base64_to_image(mask) + if mask_im.mode == "RGBA": # whiten any opaque pixels in the mask + alpha_data = mask_im.getchannel("A").convert("L") + mask_im = _Image.merge("RGB", [alpha_data, alpha_data, alpha_data]) + return { + "image": self._format_image(im), + "mask": self._format_image(mask_im), + } + else: + return { + "image": self._format_image(im), + "mask": None, + } + + return self._format_image(im) + + def postprocess( + self, y: np.ndarray | _Image.Image | str | Path | None + ) -> str | None: + """ + Parameters: + y: image as a numpy array, PIL Image, string/Path filepath, or string URL + Returns: + base64 url data + """ + if y is None: + return None + if isinstance(y, np.ndarray): + return processing_utils.encode_array_to_base64(y) + elif isinstance(y, _Image.Image): + return processing_utils.encode_pil_to_base64(y) + elif isinstance(y, (str, Path)): + return client_utils.encode_url_or_file_to_base64(y) + else: + raise ValueError("Cannot process this value as an Image") + + def set_interpret_parameters(self, segments: int = 16): + """ + Calculates interpretation score of image subsections by splitting the image into subsections, then using a "leave one out" method to calculate the score of each subsection by whiting out the subsection and measuring the delta of the output value. + Parameters: + segments: Number of interpretation segments to split image into. + """ + self.interpretation_segments = segments + return self + + def _segment_by_slic(self, x): + """ + Helper method that segments an image into superpixels using slic. + Parameters: + x: base64 representation of an image + """ + x = processing_utils.decode_base64_to_image(x) + if self.shape is not None: + x = processing_utils.resize_and_crop(x, self.shape) + resized_and_cropped_image = np.array(x) + try: + from skimage.segmentation import slic + except (ImportError, ModuleNotFoundError) as err: + raise ValueError( + "Error: running this interpretation for images requires scikit-image, please install it first." 
+ ) from err + try: + segments_slic = slic( + resized_and_cropped_image, + self.interpretation_segments, + compactness=10, + sigma=1, + start_label=1, + ) + except TypeError: # For skimage 0.16 and older + segments_slic = slic( + resized_and_cropped_image, + self.interpretation_segments, + compactness=10, + sigma=1, + ) + return segments_slic, resized_and_cropped_image + + def tokenize(self, x): + """ + Segments image into tokens, masks, and leave-one-out-tokens + Parameters: + x: base64 representation of an image + Returns: + tokens: list of tokens, used by the get_masked_input() method + leave_one_out_tokens: list of left-out tokens, used by the get_interpretation_neighbors() method + masks: list of masks, used by the get_interpretation_neighbors() method + """ + segments_slic, resized_and_cropped_image = self._segment_by_slic(x) + tokens, masks, leave_one_out_tokens = [], [], [] + replace_color = np.mean(resized_and_cropped_image, axis=(0, 1)) + for segment_value in np.unique(segments_slic): + mask = segments_slic == segment_value + image_screen = np.copy(resized_and_cropped_image) + image_screen[segments_slic == segment_value] = replace_color + leave_one_out_tokens.append( + processing_utils.encode_array_to_base64(image_screen) + ) + token = np.copy(resized_and_cropped_image) + token[segments_slic != segment_value] = 0 + tokens.append(token) + masks.append(mask) + return tokens, leave_one_out_tokens, masks + + def get_masked_inputs(self, tokens, binary_mask_matrix): + masked_inputs = [] + for binary_mask_vector in binary_mask_matrix: + masked_input = np.zeros_like(tokens[0], dtype=int) + for token, b in zip(tokens, binary_mask_vector): + masked_input = masked_input + token * int(b) + masked_inputs.append(processing_utils.encode_array_to_base64(masked_input)) + return masked_inputs + + def get_interpretation_scores( + self, x, neighbors, scores, masks, tokens=None, **kwargs + ) -> list[list[float]]: + """ + Returns: + A 2D array representing the interpretation score of each pixel of the image. + """ + x = processing_utils.decode_base64_to_image(x) + if self.shape is not None: + x = processing_utils.resize_and_crop(x, self.shape) + x = np.array(x) + output_scores = np.zeros((x.shape[0], x.shape[1])) + + for score, mask in zip(scores, masks): + output_scores += score * mask + + max_val, min_val = np.max(output_scores), np.min(output_scores) + if max_val > 0: + output_scores = (output_scores - min_val) / (max_val - min_val) + return output_scores.tolist() + + def style(self, *, height: int | None = None, width: int | None = None, **kwargs): + """ + This method is deprecated. Please set these arguments in the constructor instead. 
+ """ + warn_style_method_deprecation() + if height is not None: + self.height = height + if width is not None: + self.width = width + return self + + def check_streamable(self): + if self.source != "webcam": + raise ValueError("Image streaming only available if source is 'webcam'.") + + def as_example(self, input_data: str | None) -> str: + if input_data is None: + return "" + elif ( + self.root_url + ): # If an externally hosted image, don't convert to absolute path + return input_data + return str(utils.abspath(input_data)) + + +all_components = [] + +if not hasattr(Block, 'original__init__'): + Block.original_init = Block.__init__ + + +def blk_ini(self, *args, **kwargs): + all_components.append(self) + return Block.original_init(self, *args, **kwargs) + + +Block.__init__ = blk_ini + + +gradio.routes.asyncio = importlib.reload(gradio.routes.asyncio) + +if not hasattr(gradio.routes.asyncio, 'original_wait_for'): + gradio.routes.asyncio.original_wait_for = gradio.routes.asyncio.wait_for + + +def patched_wait_for(fut, timeout): + del timeout + return gradio.routes.asyncio.original_wait_for(fut, timeout=65535) + + +gradio.routes.asyncio.wait_for = patched_wait_for + diff --git a/modules/html.py b/modules/html.py new file mode 100644 index 0000000000000000000000000000000000000000..769151a9ff86e460d69d3598fcac0481d59cf17b --- /dev/null +++ b/modules/html.py @@ -0,0 +1,146 @@ +css = ''' +.loader-container { + display: flex; /* Use flex to align items horizontally */ + align-items: center; /* Center items vertically within the container */ + white-space: nowrap; /* Prevent line breaks within the container */ +} + +.loader { + border: 8px solid #f3f3f3; /* Light grey */ + border-top: 8px solid #3498db; /* Blue */ + border-radius: 50%; + width: 30px; + height: 30px; + animation: spin 2s linear infinite; +} + +@keyframes spin { + 0% { transform: rotate(0deg); } + 100% { transform: rotate(360deg); } +} + +/* Style the progress bar */ +progress { + appearance: none; /* Remove default styling */ + height: 20px; /* Set the height of the progress bar */ + border-radius: 5px; /* Round the corners of the progress bar */ + background-color: #f3f3f3; /* Light grey background */ + width: 100%; +} + +/* Style the progress bar container */ +.progress-container { + margin-left: 20px; + margin-right: 20px; + flex-grow: 1; /* Allow the progress container to take up remaining space */ +} + +/* Set the color of the progress bar fill */ +progress::-webkit-progress-value { + background-color: #3498db; /* Blue color for the fill */ +} + +progress::-moz-progress-bar { + background-color: #3498db; /* Blue color for the fill in Firefox */ +} + +/* Style the text on the progress bar */ +progress::after { + content: attr(value '%'); /* Display the progress value followed by '%' */ + position: absolute; + top: 50%; + left: 50%; + transform: translate(-50%, -50%); + color: white; /* Set text color */ + font-size: 14px; /* Set font size */ +} + +/* Style other texts */ +.loader-container > span { + margin-left: 5px; /* Add spacing between the progress bar and the text */ +} + +.progress-bar > .generating { + display: none !important; +} + +.progress-bar{ + height: 30px !important; +} + +.type_row{ + height: 80px !important; +} + +.type_row_half{ + height: 32px !important; +} + +.scroll-hide{ + resize: none !important; +} + +.refresh_button{ + border: none !important; + background: none !important; + font-size: none !important; + box-shadow: none !important; +} + +.advanced_check_row{ + width: 250px !important; +} + 
+.min_check{
+    min-width: min(1px, 100%) !important;
+}
+
+.resizable_area {
+    resize: vertical;
+    overflow: auto !important;
+}
+
+.aspect_ratios label {
+    width: 140px !important;
+}
+
+.aspect_ratios label span {
+    white-space: nowrap !important;
+}
+
+.aspect_ratios label input {
+    margin-left: -5px !important;
+}
+
+.lora_enable label {
+    height: 100%;
+}
+
+.lora_enable label input {
+    margin: auto;
+}
+
+.lora_enable label span {
+    display: none;
+}
+
+@-moz-document url-prefix() {
+    .lora_weight input[type=number] {
+        width: 80px;
+    }
+}
+
+'''
+progress_html = '''
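+<!-- make_progress_html() below substitutes *number* and *text* with the live progress value and status message. -->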
+<div class="loader-container">
+    <div class="loader"></div>
+    <div class="progress-container">
+        <progress value="*number*" max="100"></progress>
+    </div>
+    <span>*text*</span>
+</div>
+''' + + +def make_progress_html(number, text): + return progress_html.replace('*number*', str(number)).replace('*text*', text) diff --git a/modules/inpaint_worker.py b/modules/inpaint_worker.py new file mode 100644 index 0000000000000000000000000000000000000000..43a7ae23e9bd2cebda69b94013bf1661bd8fd952 --- /dev/null +++ b/modules/inpaint_worker.py @@ -0,0 +1,264 @@ +import torch +import numpy as np + +from PIL import Image, ImageFilter +from modules.util import resample_image, set_image_shape_ceil, get_image_shape_ceil +from modules.upscaler import perform_upscale +import cv2 + + +inpaint_head_model = None + + +class InpaintHead(torch.nn.Module): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.head = torch.nn.Parameter(torch.empty(size=(320, 5, 3, 3), device='cpu')) + + def __call__(self, x): + x = torch.nn.functional.pad(x, (1, 1, 1, 1), "replicate") + return torch.nn.functional.conv2d(input=x, weight=self.head) + + +current_task = None + + +def box_blur(x, k): + x = Image.fromarray(x) + x = x.filter(ImageFilter.BoxBlur(k)) + return np.array(x) + + +def max_filter_opencv(x, ksize=3): + # Use OpenCV maximum filter + # Make sure the input type is int16 + return cv2.dilate(x, np.ones((ksize, ksize), dtype=np.int16)) + + +def morphological_open(x): + # Convert array to int16 type via threshold operation + x_int16 = np.zeros_like(x, dtype=np.int16) + x_int16[x > 127] = 256 + + for i in range(32): + # Use int16 type to avoid overflow + maxed = max_filter_opencv(x_int16, ksize=3) - 8 + x_int16 = np.maximum(maxed, x_int16) + + # Clip negative values to 0 and convert back to uint8 type + x_uint8 = np.clip(x_int16, 0, 255).astype(np.uint8) + return x_uint8 + + +def up255(x, t=0): + y = np.zeros_like(x).astype(np.uint8) + y[x > t] = 255 + return y + + +def imsave(x, path): + x = Image.fromarray(x) + x.save(path) + + +def regulate_abcd(x, a, b, c, d): + H, W = x.shape[:2] + if a < 0: + a = 0 + if a > H: + a = H + if b < 0: + b = 0 + if b > H: + b = H + if c < 0: + c = 0 + if c > W: + c = W + if d < 0: + d = 0 + if d > W: + d = W + return int(a), int(b), int(c), int(d) + + +def compute_initial_abcd(x): + indices = np.where(x) + a = np.min(indices[0]) + b = np.max(indices[0]) + c = np.min(indices[1]) + d = np.max(indices[1]) + abp = (b + a) // 2 + abm = (b - a) // 2 + cdp = (d + c) // 2 + cdm = (d - c) // 2 + l = int(max(abm, cdm) * 1.15) + a = abp - l + b = abp + l + 1 + c = cdp - l + d = cdp + l + 1 + a, b, c, d = regulate_abcd(x, a, b, c, d) + return a, b, c, d + + +def solve_abcd(x, a, b, c, d, k): + k = float(k) + assert 0.0 <= k <= 1.0 + + H, W = x.shape[:2] + if k == 1.0: + return 0, H, 0, W + while True: + if b - a >= H * k and d - c >= W * k: + break + + add_h = (b - a) < (d - c) + add_w = not add_h + + if b - a == H: + add_w = True + + if d - c == W: + add_h = True + + if add_h: + a -= 1 + b += 1 + + if add_w: + c -= 1 + d += 1 + + a, b, c, d = regulate_abcd(x, a, b, c, d) + return a, b, c, d + + +def fooocus_fill(image, mask): + current_image = image.copy() + raw_image = image.copy() + area = np.where(mask < 127) + store = raw_image[area] + + for k, repeats in [(512, 2), (256, 2), (128, 4), (64, 4), (33, 8), (15, 8), (5, 16), (3, 16)]: + for _ in range(repeats): + current_image = box_blur(current_image, k) + current_image[area] = store + + return current_image + + +class InpaintWorker: + def __init__(self, image, mask, use_fill=True, k=0.618): + a, b, c, d = compute_initial_abcd(mask > 0) + a, b, c, d = solve_abcd(mask, a, b, c, d, k=k) + + # interested 
area + self.interested_area = (a, b, c, d) + self.interested_mask = mask[a:b, c:d] + self.interested_image = image[a:b, c:d] + + # super resolution + if get_image_shape_ceil(self.interested_image) < 1024: + self.interested_image = perform_upscale(self.interested_image) + + # resize to make images ready for diffusion + self.interested_image = set_image_shape_ceil(self.interested_image, 1024) + self.interested_fill = self.interested_image.copy() + H, W, C = self.interested_image.shape + + # process mask + self.interested_mask = up255(resample_image(self.interested_mask, W, H), t=127) + + # compute filling + if use_fill: + self.interested_fill = fooocus_fill(self.interested_image, self.interested_mask) + + # soft pixels + self.mask = morphological_open(mask) + self.image = image + + # ending + self.latent = None + self.latent_after_swap = None + self.swapped = False + self.latent_mask = None + self.inpaint_head_feature = None + return + + def load_latent(self, latent_fill, latent_mask, latent_swap=None): + self.latent = latent_fill + self.latent_mask = latent_mask + self.latent_after_swap = latent_swap + return + + def patch(self, inpaint_head_model_path, inpaint_latent, inpaint_latent_mask, model): + global inpaint_head_model + + if inpaint_head_model is None: + inpaint_head_model = InpaintHead() + sd = torch.load(inpaint_head_model_path, map_location='cpu') + inpaint_head_model.load_state_dict(sd) + + feed = torch.cat([ + inpaint_latent_mask, + model.model.process_latent_in(inpaint_latent) + ], dim=1) + + inpaint_head_model.to(device=feed.device, dtype=feed.dtype) + inpaint_head_feature = inpaint_head_model(feed) + + def input_block_patch(h, transformer_options): + if transformer_options["block"][1] == 0: + h = h + inpaint_head_feature.to(h) + return h + + m = model.clone() + m.set_model_input_block_patch(input_block_patch) + return m + + def swap(self): + if self.swapped: + return + + if self.latent is None: + return + + if self.latent_after_swap is None: + return + + self.latent, self.latent_after_swap = self.latent_after_swap, self.latent + self.swapped = True + return + + def unswap(self): + if not self.swapped: + return + + if self.latent is None: + return + + if self.latent_after_swap is None: + return + + self.latent, self.latent_after_swap = self.latent_after_swap, self.latent + self.swapped = False + return + + def color_correction(self, img): + fg = img.astype(np.float32) + bg = self.image.copy().astype(np.float32) + w = self.mask[:, :, None].astype(np.float32) / 255.0 + y = fg * w + bg * (1 - w) + return y.clip(0, 255).astype(np.uint8) + + def post_process(self, img): + a, b, c, d = self.interested_area + content = resample_image(img, d - c, b - a) + result = self.image.copy() + result[a:b, c:d] = content + result = self.color_correction(result) + return result + + def visualize_mask_processing(self): + return [self.interested_fill, self.interested_mask, self.interested_image] + diff --git a/modules/launch_util.py b/modules/launch_util.py new file mode 100644 index 0000000000000000000000000000000000000000..b483d5158ca5eeeff6f385b1a94990f9e5f6e871 --- /dev/null +++ b/modules/launch_util.py @@ -0,0 +1,103 @@ +import os +import importlib +import importlib.util +import subprocess +import sys +import re +import logging +import importlib.metadata +import packaging.version +from packaging.requirements import Requirement + + + + +logging.getLogger("torch.distributed.nn").setLevel(logging.ERROR) # sshh... 
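+# The filter below hides xformers' harmless "A matching Triton is not available"
+# warning on platforms where Triton is not installed.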
+logging.getLogger("xformers").addFilter(lambda record: 'A matching Triton is not available' not in record.getMessage()) + +re_requirement = re.compile(r"\s*([-\w]+)\s*(?:==\s*([-+.\w]+))?\s*") + +python = sys.executable +default_command_live = (os.environ.get('LAUNCH_LIVE_OUTPUT') == "1") +index_url = os.environ.get('INDEX_URL', "") + +modules_path = os.path.dirname(os.path.realpath(__file__)) +script_path = os.path.dirname(modules_path) + + +def is_installed(package): + try: + spec = importlib.util.find_spec(package) + except ModuleNotFoundError: + return False + + return spec is not None + + +def run(command, desc=None, errdesc=None, custom_env=None, live: bool = default_command_live) -> str: + if desc is not None: + print(desc) + + run_kwargs = { + "args": command, + "shell": True, + "env": os.environ if custom_env is None else custom_env, + "encoding": 'utf8', + "errors": 'ignore', + } + + if not live: + run_kwargs["stdout"] = run_kwargs["stderr"] = subprocess.PIPE + + result = subprocess.run(**run_kwargs) + + if result.returncode != 0: + error_bits = [ + f"{errdesc or 'Error running command'}.", + f"Command: {command}", + f"Error code: {result.returncode}", + ] + if result.stdout: + error_bits.append(f"stdout: {result.stdout}") + if result.stderr: + error_bits.append(f"stderr: {result.stderr}") + raise RuntimeError("\n".join(error_bits)) + + return (result.stdout or "") + + +def run_pip(command, desc=None, live=default_command_live): + try: + index_url_line = f' --index-url {index_url}' if index_url != '' else '' + return run(f'"{python}" -m pip {command} --prefer-binary{index_url_line}', desc=f"Installing {desc}", + errdesc=f"Couldn't install {desc}", live=live) + except Exception as e: + print(e) + print(f'CMD Failed {desc}: {command}') + return None + + +def requirements_met(requirements_file): + with open(requirements_file, "r", encoding="utf8") as file: + for line in file: + line = line.strip() + if line == "" or line.startswith('#'): + continue + + requirement = Requirement(line) + package = requirement.name + + try: + version_installed = importlib.metadata.version(package) + installed_version = packaging.version.parse(version_installed) + + # Check if the installed version satisfies the requirement + if installed_version not in requirement.specifier: + print(f"Version mismatch for {package}: Installed version {version_installed} does not meet requirement {requirement}") + return False + except Exception as e: + print(f"Error checking version for {package}: {e}") + return False + + return True + diff --git a/modules/localization.py b/modules/localization.py new file mode 100644 index 0000000000000000000000000000000000000000..b21d4a564d134ac0be00d83c7005627d601d206e --- /dev/null +++ b/modules/localization.py @@ -0,0 +1,60 @@ +import json +import os + + +current_translation = {} +localization_root = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'language') + + +def localization_js(filename): + global current_translation + + if isinstance(filename, str): + full_name = os.path.abspath(os.path.join(localization_root, filename + '.json')) + if os.path.exists(full_name): + try: + with open(full_name, encoding='utf-8') as f: + current_translation = json.load(f) + assert isinstance(current_translation, dict) + for k, v in current_translation.items(): + assert isinstance(k, str) + assert isinstance(v, str) + except Exception as e: + print(str(e)) + print(f'Failed to load localization file {full_name}') + + # current_translation = {k: 'XXX' for k in current_translation.keys()} 
# use this to see if all texts are covered + + return f"window.localization = {json.dumps(current_translation)}" + + +def dump_english_config(components): + all_texts = [] + for c in components: + label = getattr(c, 'label', None) + value = getattr(c, 'value', None) + choices = getattr(c, 'choices', None) + info = getattr(c, 'info', None) + + if isinstance(label, str): + all_texts.append(label) + if isinstance(value, str): + all_texts.append(value) + if isinstance(info, str): + all_texts.append(info) + if isinstance(choices, list): + for x in choices: + if isinstance(x, str): + all_texts.append(x) + if isinstance(x, tuple): + for y in x: + if isinstance(y, str): + all_texts.append(y) + + config_dict = {k: k for k in all_texts if k != "" and 'progress-container' not in k} + full_name = os.path.abspath(os.path.join(localization_root, 'en.json')) + + with open(full_name, "w", encoding="utf-8") as json_file: + json.dump(config_dict, json_file, indent=4) + + return diff --git a/modules/lora.py b/modules/lora.py new file mode 100644 index 0000000000000000000000000000000000000000..088545c708943aa8e51e8b2bfd32b2a9968b639f --- /dev/null +++ b/modules/lora.py @@ -0,0 +1,152 @@ +def match_lora(lora, to_load): + patch_dict = {} + loaded_keys = set() + for x in to_load: + real_load_key = to_load[x] + if real_load_key in lora: + patch_dict[real_load_key] = ('fooocus', lora[real_load_key]) + loaded_keys.add(real_load_key) + continue + + alpha_name = "{}.alpha".format(x) + alpha = None + if alpha_name in lora.keys(): + alpha = lora[alpha_name].item() + loaded_keys.add(alpha_name) + + regular_lora = "{}.lora_up.weight".format(x) + diffusers_lora = "{}_lora.up.weight".format(x) + transformers_lora = "{}.lora_linear_layer.up.weight".format(x) + A_name = None + + if regular_lora in lora.keys(): + A_name = regular_lora + B_name = "{}.lora_down.weight".format(x) + mid_name = "{}.lora_mid.weight".format(x) + elif diffusers_lora in lora.keys(): + A_name = diffusers_lora + B_name = "{}_lora.down.weight".format(x) + mid_name = None + elif transformers_lora in lora.keys(): + A_name = transformers_lora + B_name ="{}.lora_linear_layer.down.weight".format(x) + mid_name = None + + if A_name is not None: + mid = None + if mid_name is not None and mid_name in lora.keys(): + mid = lora[mid_name] + loaded_keys.add(mid_name) + patch_dict[to_load[x]] = ("lora", (lora[A_name], lora[B_name], alpha, mid)) + loaded_keys.add(A_name) + loaded_keys.add(B_name) + + + ######## loha + hada_w1_a_name = "{}.hada_w1_a".format(x) + hada_w1_b_name = "{}.hada_w1_b".format(x) + hada_w2_a_name = "{}.hada_w2_a".format(x) + hada_w2_b_name = "{}.hada_w2_b".format(x) + hada_t1_name = "{}.hada_t1".format(x) + hada_t2_name = "{}.hada_t2".format(x) + if hada_w1_a_name in lora.keys(): + hada_t1 = None + hada_t2 = None + if hada_t1_name in lora.keys(): + hada_t1 = lora[hada_t1_name] + hada_t2 = lora[hada_t2_name] + loaded_keys.add(hada_t1_name) + loaded_keys.add(hada_t2_name) + + patch_dict[to_load[x]] = ("loha", (lora[hada_w1_a_name], lora[hada_w1_b_name], alpha, lora[hada_w2_a_name], lora[hada_w2_b_name], hada_t1, hada_t2)) + loaded_keys.add(hada_w1_a_name) + loaded_keys.add(hada_w1_b_name) + loaded_keys.add(hada_w2_a_name) + loaded_keys.add(hada_w2_b_name) + + + ######## lokr + lokr_w1_name = "{}.lokr_w1".format(x) + lokr_w2_name = "{}.lokr_w2".format(x) + lokr_w1_a_name = "{}.lokr_w1_a".format(x) + lokr_w1_b_name = "{}.lokr_w1_b".format(x) + lokr_t2_name = "{}.lokr_t2".format(x) + lokr_w2_a_name = "{}.lokr_w2_a".format(x) + lokr_w2_b_name = 
"{}.lokr_w2_b".format(x) + + lokr_w1 = None + if lokr_w1_name in lora.keys(): + lokr_w1 = lora[lokr_w1_name] + loaded_keys.add(lokr_w1_name) + + lokr_w2 = None + if lokr_w2_name in lora.keys(): + lokr_w2 = lora[lokr_w2_name] + loaded_keys.add(lokr_w2_name) + + lokr_w1_a = None + if lokr_w1_a_name in lora.keys(): + lokr_w1_a = lora[lokr_w1_a_name] + loaded_keys.add(lokr_w1_a_name) + + lokr_w1_b = None + if lokr_w1_b_name in lora.keys(): + lokr_w1_b = lora[lokr_w1_b_name] + loaded_keys.add(lokr_w1_b_name) + + lokr_w2_a = None + if lokr_w2_a_name in lora.keys(): + lokr_w2_a = lora[lokr_w2_a_name] + loaded_keys.add(lokr_w2_a_name) + + lokr_w2_b = None + if lokr_w2_b_name in lora.keys(): + lokr_w2_b = lora[lokr_w2_b_name] + loaded_keys.add(lokr_w2_b_name) + + lokr_t2 = None + if lokr_t2_name in lora.keys(): + lokr_t2 = lora[lokr_t2_name] + loaded_keys.add(lokr_t2_name) + + if (lokr_w1 is not None) or (lokr_w2 is not None) or (lokr_w1_a is not None) or (lokr_w2_a is not None): + patch_dict[to_load[x]] = ("lokr", (lokr_w1, lokr_w2, alpha, lokr_w1_a, lokr_w1_b, lokr_w2_a, lokr_w2_b, lokr_t2)) + + #glora + a1_name = "{}.a1.weight".format(x) + a2_name = "{}.a2.weight".format(x) + b1_name = "{}.b1.weight".format(x) + b2_name = "{}.b2.weight".format(x) + if a1_name in lora: + patch_dict[to_load[x]] = ("glora", (lora[a1_name], lora[a2_name], lora[b1_name], lora[b2_name], alpha)) + loaded_keys.add(a1_name) + loaded_keys.add(a2_name) + loaded_keys.add(b1_name) + loaded_keys.add(b2_name) + + w_norm_name = "{}.w_norm".format(x) + b_norm_name = "{}.b_norm".format(x) + w_norm = lora.get(w_norm_name, None) + b_norm = lora.get(b_norm_name, None) + + if w_norm is not None: + loaded_keys.add(w_norm_name) + patch_dict[to_load[x]] = ("diff", (w_norm,)) + if b_norm is not None: + loaded_keys.add(b_norm_name) + patch_dict["{}.bias".format(to_load[x][:-len(".weight")])] = ("diff", (b_norm,)) + + diff_name = "{}.diff".format(x) + diff_weight = lora.get(diff_name, None) + if diff_weight is not None: + patch_dict[to_load[x]] = ("diff", (diff_weight,)) + loaded_keys.add(diff_name) + + diff_bias_name = "{}.diff_b".format(x) + diff_bias = lora.get(diff_bias_name, None) + if diff_bias is not None: + patch_dict["{}.bias".format(to_load[x][:-len(".weight")])] = ("diff", (diff_bias,)) + loaded_keys.add(diff_bias_name) + + remaining_dict = {x: y for x, y in lora.items() if x not in loaded_keys} + return patch_dict, remaining_dict diff --git a/modules/meta_parser.py b/modules/meta_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..546c093fa008da831fb17b0e6a2cc256467315b2 --- /dev/null +++ b/modules/meta_parser.py @@ -0,0 +1,573 @@ +import json +import os +import re +from abc import ABC, abstractmethod +from pathlib import Path + +import gradio as gr +from PIL import Image + +import fooocus_version +import modules.config +import modules.sdxl_styles +from modules.flags import MetadataScheme, Performance, Steps +from modules.flags import SAMPLERS, CIVITAI_NO_KARRAS +from modules.util import quote, unquote, extract_styles_from_prompt, is_json, get_file_from_folder_list, calculate_sha256 + +re_param_code = r'\s*(\w[\w \-/]+):\s*("(?:\\.|[^\\"])+"|[^,]*)(?:,|$)' +re_param = re.compile(re_param_code) +re_imagesize = re.compile(r"^(\d+)x(\d+)$") + +hash_cache = {} + + +def load_parameter_button_click(raw_metadata: dict | str, is_generating: bool): + loaded_parameter_dict = raw_metadata + if isinstance(raw_metadata, str): + loaded_parameter_dict = json.loads(raw_metadata) + assert 
isinstance(loaded_parameter_dict, dict) + + results = [len(loaded_parameter_dict) > 0, 1] + + get_str('prompt', 'Prompt', loaded_parameter_dict, results) + get_str('negative_prompt', 'Negative Prompt', loaded_parameter_dict, results) + get_list('styles', 'Styles', loaded_parameter_dict, results) + get_str('performance', 'Performance', loaded_parameter_dict, results) + get_steps('steps', 'Steps', loaded_parameter_dict, results) + get_float('overwrite_switch', 'Overwrite Switch', loaded_parameter_dict, results) + get_resolution('resolution', 'Resolution', loaded_parameter_dict, results) + get_float('guidance_scale', 'Guidance Scale', loaded_parameter_dict, results) + get_float('sharpness', 'Sharpness', loaded_parameter_dict, results) + get_adm_guidance('adm_guidance', 'ADM Guidance', loaded_parameter_dict, results) + get_str('refiner_swap_method', 'Refiner Swap Method', loaded_parameter_dict, results) + get_float('adaptive_cfg', 'CFG Mimicking from TSNR', loaded_parameter_dict, results) + get_str('base_model', 'Base Model', loaded_parameter_dict, results) + get_str('refiner_model', 'Refiner Model', loaded_parameter_dict, results) + get_float('refiner_switch', 'Refiner Switch', loaded_parameter_dict, results) + get_str('sampler', 'Sampler', loaded_parameter_dict, results) + get_str('scheduler', 'Scheduler', loaded_parameter_dict, results) + get_seed('seed', 'Seed', loaded_parameter_dict, results) + + if is_generating: + results.append(gr.update()) + else: + results.append(gr.update(visible=True)) + + results.append(gr.update(visible=False)) + + get_freeu('freeu', 'FreeU', loaded_parameter_dict, results) + + for i in range(modules.config.default_max_lora_number): + get_lora(f'lora_combined_{i + 1}', f'LoRA {i + 1}', loaded_parameter_dict, results) + + return results + + +def get_str(key: str, fallback: str | None, source_dict: dict, results: list, default=None): + try: + h = source_dict.get(key, source_dict.get(fallback, default)) + assert isinstance(h, str) + results.append(h) + except: + results.append(gr.update()) + + +def get_list(key: str, fallback: str | None, source_dict: dict, results: list, default=None): + try: + h = source_dict.get(key, source_dict.get(fallback, default)) + h = eval(h) + assert isinstance(h, list) + results.append(h) + except: + results.append(gr.update()) + + +def get_float(key: str, fallback: str | None, source_dict: dict, results: list, default=None): + try: + h = source_dict.get(key, source_dict.get(fallback, default)) + assert h is not None + h = float(h) + results.append(h) + except: + results.append(gr.update()) + + +def get_steps(key: str, fallback: str | None, source_dict: dict, results: list, default=None): + try: + h = source_dict.get(key, source_dict.get(fallback, default)) + assert h is not None + h = int(h) + # if not in steps or in steps and performance is not the same + if h not in iter(Steps) or Steps(h).name.casefold() != source_dict.get('performance', '').replace(' ', '_').casefold(): + results.append(h) + return + results.append(-1) + except: + results.append(-1) + + +def get_resolution(key: str, fallback: str | None, source_dict: dict, results: list, default=None): + try: + h = source_dict.get(key, source_dict.get(fallback, default)) + width, height = eval(h) + formatted = modules.config.add_ratio(f'{width}*{height}') + if formatted in modules.config.available_aspect_ratios: + results.append(formatted) + results.append(-1) + results.append(-1) + else: + results.append(gr.update()) + results.append(int(width)) + results.append(int(height)) + 
except: + results.append(gr.update()) + results.append(gr.update()) + results.append(gr.update()) + + +def get_seed(key: str, fallback: str | None, source_dict: dict, results: list, default=None): + try: + h = source_dict.get(key, source_dict.get(fallback, default)) + assert h is not None + h = int(h) + results.append(False) + results.append(h) + except: + results.append(gr.update()) + results.append(gr.update()) + + +def get_adm_guidance(key: str, fallback: str | None, source_dict: dict, results: list, default=None): + try: + h = source_dict.get(key, source_dict.get(fallback, default)) + p, n, e = eval(h) + results.append(float(p)) + results.append(float(n)) + results.append(float(e)) + except: + results.append(gr.update()) + results.append(gr.update()) + results.append(gr.update()) + + +def get_freeu(key: str, fallback: str | None, source_dict: dict, results: list, default=None): + try: + h = source_dict.get(key, source_dict.get(fallback, default)) + b1, b2, s1, s2 = eval(h) + results.append(True) + results.append(float(b1)) + results.append(float(b2)) + results.append(float(s1)) + results.append(float(s2)) + except: + results.append(False) + results.append(gr.update()) + results.append(gr.update()) + results.append(gr.update()) + results.append(gr.update()) + + +def get_lora(key: str, fallback: str | None, source_dict: dict, results: list): + try: + n, w = source_dict.get(key, source_dict.get(fallback)).split(' : ') + w = float(w) + results.append(True) + results.append(n) + results.append(w) + except: + results.append(True) + results.append('None') + results.append(1) + + +def get_sha256(filepath): + global hash_cache + if filepath not in hash_cache: + hash_cache[filepath] = calculate_sha256(filepath) + + return hash_cache[filepath] + + +def parse_meta_from_preset(preset_content): + assert isinstance(preset_content, dict) + preset_prepared = {} + items = preset_content + + for settings_key, meta_key in modules.config.possible_preset_keys.items(): + if settings_key == "default_loras": + loras = getattr(modules.config, settings_key) + if settings_key in items: + loras = items[settings_key] + for index, lora in enumerate(loras[:5]): + preset_prepared[f'lora_combined_{index + 1}'] = ' : '.join(map(str, lora)) + elif settings_key == "default_aspect_ratio": + if settings_key in items and items[settings_key] is not None: + default_aspect_ratio = items[settings_key] + width, height = default_aspect_ratio.split('*') + else: + default_aspect_ratio = getattr(modules.config, settings_key) + width, height = default_aspect_ratio.split('×') + height = height[:height.index(" ")] + preset_prepared[meta_key] = (width, height) + else: + preset_prepared[meta_key] = items[settings_key] if settings_key in items and items[ + settings_key] is not None else getattr(modules.config, settings_key) + + if settings_key == "default_styles" or settings_key == "default_aspect_ratio": + preset_prepared[meta_key] = str(preset_prepared[meta_key]) + + return preset_prepared + + +class MetadataParser(ABC): + def __init__(self): + self.raw_prompt: str = '' + self.full_prompt: str = '' + self.raw_negative_prompt: str = '' + self.full_negative_prompt: str = '' + self.steps: int = 30 + self.base_model_name: str = '' + self.base_model_hash: str = '' + self.refiner_model_name: str = '' + self.refiner_model_hash: str = '' + self.loras: list = [] + + @abstractmethod + def get_scheme(self) -> MetadataScheme: + raise NotImplementedError + + @abstractmethod + def parse_json(self, metadata: dict | str) -> dict: + raise 
NotImplementedError + + @abstractmethod + def parse_string(self, metadata: dict) -> str: + raise NotImplementedError + + def set_data(self, raw_prompt, full_prompt, raw_negative_prompt, full_negative_prompt, steps, base_model_name, + refiner_model_name, loras): + self.raw_prompt = raw_prompt + self.full_prompt = full_prompt + self.raw_negative_prompt = raw_negative_prompt + self.full_negative_prompt = full_negative_prompt + self.steps = steps + self.base_model_name = Path(base_model_name).stem + + base_model_path = get_file_from_folder_list(base_model_name, modules.config.paths_checkpoints) + self.base_model_hash = get_sha256(base_model_path) + + if refiner_model_name not in ['', 'None']: + self.refiner_model_name = Path(refiner_model_name).stem + refiner_model_path = get_file_from_folder_list(refiner_model_name, modules.config.paths_checkpoints) + self.refiner_model_hash = get_sha256(refiner_model_path) + + self.loras = [] + for (lora_name, lora_weight) in loras: + if lora_name != 'None': + lora_path = get_file_from_folder_list(lora_name, modules.config.paths_loras) + lora_hash = get_sha256(lora_path) + self.loras.append((Path(lora_name).stem, lora_weight, lora_hash)) + + +class A1111MetadataParser(MetadataParser): + def get_scheme(self) -> MetadataScheme: + return MetadataScheme.A1111 + + fooocus_to_a1111 = { + 'raw_prompt': 'Raw prompt', + 'raw_negative_prompt': 'Raw negative prompt', + 'negative_prompt': 'Negative prompt', + 'styles': 'Styles', + 'performance': 'Performance', + 'steps': 'Steps', + 'sampler': 'Sampler', + 'scheduler': 'Scheduler', + 'guidance_scale': 'CFG scale', + 'seed': 'Seed', + 'resolution': 'Size', + 'sharpness': 'Sharpness', + 'adm_guidance': 'ADM Guidance', + 'refiner_swap_method': 'Refiner Swap Method', + 'adaptive_cfg': 'Adaptive CFG', + 'overwrite_switch': 'Overwrite Switch', + 'freeu': 'FreeU', + 'base_model': 'Model', + 'base_model_hash': 'Model hash', + 'refiner_model': 'Refiner', + 'refiner_model_hash': 'Refiner hash', + 'lora_hashes': 'Lora hashes', + 'lora_weights': 'Lora weights', + 'created_by': 'User', + 'version': 'Version' + } + + def parse_json(self, metadata: str) -> dict: + metadata_prompt = '' + metadata_negative_prompt = '' + + done_with_prompt = False + + *lines, lastline = metadata.strip().split("\n") + if len(re_param.findall(lastline)) < 3: + lines.append(lastline) + lastline = '' + + for line in lines: + line = line.strip() + if line.startswith(f"{self.fooocus_to_a1111['negative_prompt']}:"): + done_with_prompt = True + line = line[len(f"{self.fooocus_to_a1111['negative_prompt']}:"):].strip() + if done_with_prompt: + metadata_negative_prompt += ('' if metadata_negative_prompt == '' else "\n") + line + else: + metadata_prompt += ('' if metadata_prompt == '' else "\n") + line + + found_styles, prompt, negative_prompt = extract_styles_from_prompt(metadata_prompt, metadata_negative_prompt) + + data = { + 'prompt': prompt, + 'negative_prompt': negative_prompt + } + + for k, v in re_param.findall(lastline): + try: + if v != '' and v[0] == '"' and v[-1] == '"': + v = unquote(v) + + m = re_imagesize.match(v) + if m is not None: + data['resolution'] = str((m.group(1), m.group(2))) + else: + data[list(self.fooocus_to_a1111.keys())[list(self.fooocus_to_a1111.values()).index(k)]] = v + except Exception: + print(f"Error parsing \"{k}: {v}\"") + + # workaround for multiline prompts + if 'raw_prompt' in data: + data['prompt'] = data['raw_prompt'] + raw_prompt = data['raw_prompt'].replace("\n", ', ') + if metadata_prompt != raw_prompt and 
modules.sdxl_styles.fooocus_expansion not in found_styles:
+                found_styles.append(modules.sdxl_styles.fooocus_expansion)
+
+        if 'raw_negative_prompt' in data:
+            data['negative_prompt'] = data['raw_negative_prompt']
+
+        data['styles'] = str(found_styles)
+
+        # try to load performance based on steps, fallback for direct A1111 imports
+        if 'steps' in data and 'performance' not in data:
+            try:
+                data['performance'] = Performance[Steps(int(data['steps'])).name].value
+            except (ValueError, KeyError):
+                pass
+
+        if 'sampler' in data:
+            data['sampler'] = data['sampler'].replace(' Karras', '')
+            # get key
+            for k, v in SAMPLERS.items():
+                if v == data['sampler']:
+                    data['sampler'] = k
+                    break
+
+        for key in ['base_model', 'refiner_model']:
+            if key in data:
+                for filename in modules.config.model_filenames:
+                    path = Path(filename)
+                    if data[key] == path.stem:
+                        data[key] = filename
+                        break
+
+        if 'lora_hashes' in data:
+            lora_filenames = modules.config.lora_filenames.copy()
+            if modules.config.sdxl_lcm_lora in lora_filenames:
+                lora_filenames.remove(modules.config.sdxl_lcm_lora)
+            for li, lora in enumerate(data['lora_hashes'].split(', ')):
+                lora_name, lora_hash, lora_weight = lora.split(': ')
+                for filename in lora_filenames:
+                    path = Path(filename)
+                    if lora_name == path.stem:
+                        data[f'lora_combined_{li + 1}'] = f'{filename} : {lora_weight}'
+                        break
+
+        return data
+
+    def parse_string(self, metadata: dict) -> str:
+        data = {k: v for _, k, v in metadata}
+
+        width, height = eval(data['resolution'])
+
+        sampler = data['sampler']
+        scheduler = data['scheduler']
+        if sampler in SAMPLERS and SAMPLERS[sampler] != '':
+            sampler = SAMPLERS[sampler]
+            if sampler not in CIVITAI_NO_KARRAS and scheduler == 'karras':
+                sampler += ' Karras'
+
+        generation_params = {
+            self.fooocus_to_a1111['steps']: self.steps,
+            self.fooocus_to_a1111['sampler']: sampler,
+            self.fooocus_to_a1111['seed']: data['seed'],
+            self.fooocus_to_a1111['resolution']: f'{width}x{height}',
+            self.fooocus_to_a1111['guidance_scale']: data['guidance_scale'],
+            self.fooocus_to_a1111['sharpness']: data['sharpness'],
+            self.fooocus_to_a1111['adm_guidance']: data['adm_guidance'],
+            self.fooocus_to_a1111['base_model']: Path(data['base_model']).stem,
+            self.fooocus_to_a1111['base_model_hash']: self.base_model_hash,
+
+            self.fooocus_to_a1111['performance']: data['performance'],
+            self.fooocus_to_a1111['scheduler']: scheduler,
+            # workaround for multiline prompts
+            self.fooocus_to_a1111['raw_prompt']: self.raw_prompt,
+            self.fooocus_to_a1111['raw_negative_prompt']: self.raw_negative_prompt,
+        }
+
+        if self.refiner_model_name not in ['', 'None']:
+            generation_params |= {
+                self.fooocus_to_a1111['refiner_model']: self.refiner_model_name,
+                self.fooocus_to_a1111['refiner_model_hash']: self.refiner_model_hash
+            }
+
+        for key in ['adaptive_cfg', 'overwrite_switch', 'refiner_swap_method', 'freeu']:
+            if key in data:
+                generation_params[self.fooocus_to_a1111[key]] = data[key]
+
+        lora_hashes = []
+        for index, (lora_name, lora_weight, lora_hash) in enumerate(self.loras):
+            # workaround for Fooocus not knowing LoRA name in LoRA metadata
+            lora_hashes.append(f'{lora_name}: {lora_hash}: {lora_weight}')
+        lora_hashes_string = ', '.join(lora_hashes)
+
+        generation_params |= {
+            self.fooocus_to_a1111['lora_hashes']: lora_hashes_string,
+            self.fooocus_to_a1111['version']: data['version']
+        }
+
+        if modules.config.metadata_created_by != '':
+            generation_params[self.fooocus_to_a1111['created_by']] = modules.config.metadata_created_by
+
+        generation_params_text = ", 
".join( + [k if k == v else f'{k}: {quote(v)}' for k, v in generation_params.items() if + v is not None]) + positive_prompt_resolved = ', '.join(self.full_prompt) + negative_prompt_resolved = ', '.join(self.full_negative_prompt) + negative_prompt_text = f"\nNegative prompt: {negative_prompt_resolved}" if negative_prompt_resolved else "" + return f"{positive_prompt_resolved}{negative_prompt_text}\n{generation_params_text}".strip() + + +class FooocusMetadataParser(MetadataParser): + def get_scheme(self) -> MetadataScheme: + return MetadataScheme.FOOOCUS + + def parse_json(self, metadata: dict) -> dict: + model_filenames = modules.config.model_filenames.copy() + lora_filenames = modules.config.lora_filenames.copy() + if modules.config.sdxl_lcm_lora in lora_filenames: + lora_filenames.remove(modules.config.sdxl_lcm_lora) + + for key, value in metadata.items(): + if value in ['', 'None']: + continue + if key in ['base_model', 'refiner_model']: + metadata[key] = self.replace_value_with_filename(key, value, model_filenames) + elif key.startswith('lora_combined_'): + metadata[key] = self.replace_value_with_filename(key, value, lora_filenames) + else: + continue + + return metadata + + def parse_string(self, metadata: list) -> str: + for li, (label, key, value) in enumerate(metadata): + # remove model folder paths from metadata + if key.startswith('lora_combined_'): + name, weight = value.split(' : ') + name = Path(name).stem + value = f'{name} : {weight}' + metadata[li] = (label, key, value) + + res = {k: v for _, k, v in metadata} + + res['full_prompt'] = self.full_prompt + res['full_negative_prompt'] = self.full_negative_prompt + res['steps'] = self.steps + res['base_model'] = self.base_model_name + res['base_model_hash'] = self.base_model_hash + + if self.refiner_model_name not in ['', 'None']: + res['refiner_model'] = self.refiner_model_name + res['refiner_model_hash'] = self.refiner_model_hash + + res['loras'] = self.loras + + if modules.config.metadata_created_by != '': + res['created_by'] = modules.config.metadata_created_by + + return json.dumps(dict(sorted(res.items()))) + + @staticmethod + def replace_value_with_filename(key, value, filenames): + for filename in filenames: + path = Path(filename) + if key.startswith('lora_combined_'): + name, weight = value.split(' : ') + if name == path.stem: + return f'{filename} : {weight}' + elif value == path.stem: + return filename + + +def get_metadata_parser(metadata_scheme: MetadataScheme) -> MetadataParser: + match metadata_scheme: + case MetadataScheme.FOOOCUS: + return FooocusMetadataParser() + case MetadataScheme.A1111: + return A1111MetadataParser() + case _: + raise NotImplementedError + + +def read_info_from_image(filepath) -> tuple[str | None, MetadataScheme | None]: + with Image.open(filepath) as image: + items = (image.info or {}).copy() + + parameters = items.pop('parameters', None) + metadata_scheme = items.pop('fooocus_scheme', None) + exif = items.pop('exif', None) + + if parameters is not None and is_json(parameters): + parameters = json.loads(parameters) + elif exif is not None: + exif = image.getexif() + # 0x9286 = UserComment + parameters = exif.get(0x9286, None) + # 0x927C = MakerNote + metadata_scheme = exif.get(0x927C, None) + + if is_json(parameters): + parameters = json.loads(parameters) + + try: + metadata_scheme = MetadataScheme(metadata_scheme) + except ValueError: + metadata_scheme = None + + # broad fallback + if isinstance(parameters, dict): + metadata_scheme = MetadataScheme.FOOOCUS + + if isinstance(parameters, 
str): + metadata_scheme = MetadataScheme.A1111 + + return parameters, metadata_scheme + + +def get_exif(metadata: str | None, metadata_scheme: str): + exif = Image.Exif() + # tags see see https://github.com/python-pillow/Pillow/blob/9.2.x/src/PIL/ExifTags.py + # 0x9286 = UserComment + exif[0x9286] = metadata + # 0x0131 = Software + exif[0x0131] = 'Fooocus v' + fooocus_version.version + # 0x927C = MakerNote + exif[0x927C] = metadata_scheme + return exif \ No newline at end of file diff --git a/modules/model_loader.py b/modules/model_loader.py new file mode 100644 index 0000000000000000000000000000000000000000..8ba336a915ae234b7cd5f9f2576d4edf779738ba --- /dev/null +++ b/modules/model_loader.py @@ -0,0 +1,26 @@ +import os +from urllib.parse import urlparse +from typing import Optional + + +def load_file_from_url( + url: str, + *, + model_dir: str, + progress: bool = True, + file_name: Optional[str] = None, +) -> str: + """Download a file from `url` into `model_dir`, using the file present if possible. + + Returns the path to the downloaded file. + """ + os.makedirs(model_dir, exist_ok=True) + if not file_name: + parts = urlparse(url) + file_name = os.path.basename(parts.path) + cached_file = os.path.abspath(os.path.join(model_dir, file_name)) + if not os.path.exists(cached_file): + print(f'Downloading: "{url}" to {cached_file}\n') + from torch.hub import download_url_to_file + download_url_to_file(url, cached_file, progress=progress) + return cached_file diff --git a/modules/ops.py b/modules/ops.py new file mode 100644 index 0000000000000000000000000000000000000000..ee0e775634314d1b71811258cff87b2178e1c740 --- /dev/null +++ b/modules/ops.py @@ -0,0 +1,19 @@ +import torch +import contextlib + + +@contextlib.contextmanager +def use_patched_ops(operations): + op_names = ['Linear', 'Conv2d', 'Conv3d', 'GroupNorm', 'LayerNorm'] + backups = {op_name: getattr(torch.nn, op_name) for op_name in op_names} + + try: + for op_name in op_names: + setattr(torch.nn, op_name, getattr(operations, op_name)) + + yield + + finally: + for op_name in op_names: + setattr(torch.nn, op_name, backups[op_name]) + return diff --git a/modules/patch.py b/modules/patch.py new file mode 100644 index 0000000000000000000000000000000000000000..3c2dd8f477902e68a467e8f89888934a762f4bb1 --- /dev/null +++ b/modules/patch.py @@ -0,0 +1,513 @@ +import os +import torch +import time +import math +import ldm_patched.modules.model_base +import ldm_patched.ldm.modules.diffusionmodules.openaimodel +import ldm_patched.modules.model_management +import modules.anisotropic as anisotropic +import ldm_patched.ldm.modules.attention +import ldm_patched.k_diffusion.sampling +import ldm_patched.modules.sd1_clip +import modules.inpaint_worker as inpaint_worker +import ldm_patched.ldm.modules.diffusionmodules.openaimodel +import ldm_patched.ldm.modules.diffusionmodules.model +import ldm_patched.modules.sd +import ldm_patched.controlnet.cldm +import ldm_patched.modules.model_patcher +import ldm_patched.modules.samplers +import ldm_patched.modules.args_parser +import warnings +import safetensors.torch +import modules.constants as constants + +from ldm_patched.modules.samplers import calc_cond_uncond_batch +from ldm_patched.k_diffusion.sampling import BatchedBrownianTree +from ldm_patched.ldm.modules.diffusionmodules.openaimodel import forward_timestep_embed, apply_control +from modules.patch_precision import patch_all_precision +from modules.patch_clip import patch_all_clip + + +class PatchSettings: + def __init__(self, + sharpness=2.0, + 
adm_scaler_end=0.3, + positive_adm_scale=1.5, + negative_adm_scale=0.8, + controlnet_softness=0.25, + adaptive_cfg=7.0): + self.sharpness = sharpness + self.adm_scaler_end = adm_scaler_end + self.positive_adm_scale = positive_adm_scale + self.negative_adm_scale = negative_adm_scale + self.controlnet_softness = controlnet_softness + self.adaptive_cfg = adaptive_cfg + self.global_diffusion_progress = 0 + self.eps_record = None + + +patch_settings = {} + + +def calculate_weight_patched(self, patches, weight, key): + for p in patches: + alpha = p[0] + v = p[1] + strength_model = p[2] + + if strength_model != 1.0: + weight *= strength_model + + if isinstance(v, list): + v = (self.calculate_weight(v[1:], v[0].clone(), key),) + + if len(v) == 1: + patch_type = "diff" + elif len(v) == 2: + patch_type = v[0] + v = v[1] + + if patch_type == "diff": + w1 = v[0] + if alpha != 0.0: + if w1.shape != weight.shape: + print("WARNING SHAPE MISMATCH {} WEIGHT NOT MERGED {} != {}".format(key, w1.shape, weight.shape)) + else: + weight += alpha * ldm_patched.modules.model_management.cast_to_device(w1, weight.device, weight.dtype) + elif patch_type == "lora": + mat1 = ldm_patched.modules.model_management.cast_to_device(v[0], weight.device, torch.float32) + mat2 = ldm_patched.modules.model_management.cast_to_device(v[1], weight.device, torch.float32) + if v[2] is not None: + alpha *= v[2] / mat2.shape[0] + if v[3] is not None: + mat3 = ldm_patched.modules.model_management.cast_to_device(v[3], weight.device, torch.float32) + final_shape = [mat2.shape[1], mat2.shape[0], mat3.shape[2], mat3.shape[3]] + mat2 = torch.mm(mat2.transpose(0, 1).flatten(start_dim=1), + mat3.transpose(0, 1).flatten(start_dim=1)).reshape(final_shape).transpose(0, 1) + try: + weight += (alpha * torch.mm(mat1.flatten(start_dim=1), mat2.flatten(start_dim=1))).reshape( + weight.shape).type(weight.dtype) + except Exception as e: + print("ERROR", key, e) + elif patch_type == "fooocus": + w1 = ldm_patched.modules.model_management.cast_to_device(v[0], weight.device, torch.float32) + w_min = ldm_patched.modules.model_management.cast_to_device(v[1], weight.device, torch.float32) + w_max = ldm_patched.modules.model_management.cast_to_device(v[2], weight.device, torch.float32) + w1 = (w1 / 255.0) * (w_max - w_min) + w_min + if alpha != 0.0: + if w1.shape != weight.shape: + print("WARNING SHAPE MISMATCH {} FOOOCUS WEIGHT NOT MERGED {} != {}".format(key, w1.shape, weight.shape)) + else: + weight += alpha * ldm_patched.modules.model_management.cast_to_device(w1, weight.device, weight.dtype) + elif patch_type == "lokr": + w1 = v[0] + w2 = v[1] + w1_a = v[3] + w1_b = v[4] + w2_a = v[5] + w2_b = v[6] + t2 = v[7] + dim = None + + if w1 is None: + dim = w1_b.shape[0] + w1 = torch.mm(ldm_patched.modules.model_management.cast_to_device(w1_a, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w1_b, weight.device, torch.float32)) + else: + w1 = ldm_patched.modules.model_management.cast_to_device(w1, weight.device, torch.float32) + + if w2 is None: + dim = w2_b.shape[0] + if t2 is None: + w2 = torch.mm(ldm_patched.modules.model_management.cast_to_device(w2_a, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w2_b, weight.device, torch.float32)) + else: + w2 = torch.einsum('i j k l, j r, i p -> p r k l', + ldm_patched.modules.model_management.cast_to_device(t2, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w2_b, weight.device, torch.float32), + 
ldm_patched.modules.model_management.cast_to_device(w2_a, weight.device, torch.float32)) + else: + w2 = ldm_patched.modules.model_management.cast_to_device(w2, weight.device, torch.float32) + + if len(w2.shape) == 4: + w1 = w1.unsqueeze(2).unsqueeze(2) + if v[2] is not None and dim is not None: + alpha *= v[2] / dim + + try: + weight += alpha * torch.kron(w1, w2).reshape(weight.shape).type(weight.dtype) + except Exception as e: + print("ERROR", key, e) + elif patch_type == "loha": + w1a = v[0] + w1b = v[1] + if v[2] is not None: + alpha *= v[2] / w1b.shape[0] + w2a = v[3] + w2b = v[4] + if v[5] is not None: # cp decomposition + t1 = v[5] + t2 = v[6] + m1 = torch.einsum('i j k l, j r, i p -> p r k l', + ldm_patched.modules.model_management.cast_to_device(t1, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w1b, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w1a, weight.device, torch.float32)) + + m2 = torch.einsum('i j k l, j r, i p -> p r k l', + ldm_patched.modules.model_management.cast_to_device(t2, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w2b, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w2a, weight.device, torch.float32)) + else: + m1 = torch.mm(ldm_patched.modules.model_management.cast_to_device(w1a, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w1b, weight.device, torch.float32)) + m2 = torch.mm(ldm_patched.modules.model_management.cast_to_device(w2a, weight.device, torch.float32), + ldm_patched.modules.model_management.cast_to_device(w2b, weight.device, torch.float32)) + + try: + weight += (alpha * m1 * m2).reshape(weight.shape).type(weight.dtype) + except Exception as e: + print("ERROR", key, e) + elif patch_type == "glora": + if v[4] is not None: + alpha *= v[4] / v[0].shape[0] + + a1 = ldm_patched.modules.model_management.cast_to_device(v[0].flatten(start_dim=1), weight.device, torch.float32) + a2 = ldm_patched.modules.model_management.cast_to_device(v[1].flatten(start_dim=1), weight.device, torch.float32) + b1 = ldm_patched.modules.model_management.cast_to_device(v[2].flatten(start_dim=1), weight.device, torch.float32) + b2 = ldm_patched.modules.model_management.cast_to_device(v[3].flatten(start_dim=1), weight.device, torch.float32) + + weight += ((torch.mm(b2, b1) + torch.mm(torch.mm(weight.flatten(start_dim=1), a2), a1)) * alpha).reshape(weight.shape).type(weight.dtype) + else: + print("patch type not recognized", patch_type, key) + + return weight + + +class BrownianTreeNoiseSamplerPatched: + transform = None + tree = None + + @staticmethod + def global_init(x, sigma_min, sigma_max, seed=None, transform=lambda x: x, cpu=False): + if ldm_patched.modules.model_management.directml_enabled: + cpu = True + + t0, t1 = transform(torch.as_tensor(sigma_min)), transform(torch.as_tensor(sigma_max)) + + BrownianTreeNoiseSamplerPatched.transform = transform + BrownianTreeNoiseSamplerPatched.tree = BatchedBrownianTree(x, t0, t1, seed, cpu=cpu) + + def __init__(self, *args, **kwargs): + pass + + @staticmethod + def __call__(sigma, sigma_next): + transform = BrownianTreeNoiseSamplerPatched.transform + tree = BrownianTreeNoiseSamplerPatched.tree + + t0, t1 = transform(torch.as_tensor(sigma)), transform(torch.as_tensor(sigma_next)) + return tree(t0, t1) / (t1 - t0).abs().sqrt() + + +def compute_cfg(uncond, cond, cfg_scale, t): + pid = os.getpid() + mimic_cfg = float(patch_settings[pid].adaptive_cfg) 
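+    # Adaptive CFG ("TSNR mimicking"): when the requested cfg_scale exceeds
+    # patch_settings[pid].adaptive_cfg, the result blends the true-CFG eps with a
+    # milder eps computed at the mimic scale. t is the diffusion progress in [0, 1],
+    # so early steps (t ~ 0) lean on the mimicked eps and late steps (t ~ 1) on the
+    # real one; e.g. cfg_scale=12, adaptive_cfg=7, t=0.5 averages the two.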
+ real_cfg = float(cfg_scale) + + real_eps = uncond + real_cfg * (cond - uncond) + + if cfg_scale > patch_settings[pid].adaptive_cfg: + mimicked_eps = uncond + mimic_cfg * (cond - uncond) + return real_eps * t + mimicked_eps * (1 - t) + else: + return real_eps + + +def patched_sampling_function(model, x, timestep, uncond, cond, cond_scale, model_options=None, seed=None): + pid = os.getpid() + + if math.isclose(cond_scale, 1.0) and not model_options.get("disable_cfg1_optimization", False): + final_x0 = calc_cond_uncond_batch(model, cond, None, x, timestep, model_options)[0] + + if patch_settings[pid].eps_record is not None: + patch_settings[pid].eps_record = ((x - final_x0) / timestep).cpu() + + return final_x0 + + positive_x0, negative_x0 = calc_cond_uncond_batch(model, cond, uncond, x, timestep, model_options) + + positive_eps = x - positive_x0 + negative_eps = x - negative_x0 + + alpha = 0.001 * patch_settings[pid].sharpness * patch_settings[pid].global_diffusion_progress + + positive_eps_degraded = anisotropic.adaptive_anisotropic_filter(x=positive_eps, g=positive_x0) + positive_eps_degraded_weighted = positive_eps_degraded * alpha + positive_eps * (1.0 - alpha) + + final_eps = compute_cfg(uncond=negative_eps, cond=positive_eps_degraded_weighted, + cfg_scale=cond_scale, t=patch_settings[pid].global_diffusion_progress) + + if patch_settings[pid].eps_record is not None: + patch_settings[pid].eps_record = (final_eps / timestep).cpu() + + return x - final_eps + + +def round_to_64(x): + h = float(x) + h = h / 64.0 + h = round(h) + h = int(h) + h = h * 64 + return h + + +def sdxl_encode_adm_patched(self, **kwargs): + clip_pooled = ldm_patched.modules.model_base.sdxl_pooled(kwargs, self.noise_augmentor) + width = kwargs.get("width", 1024) + height = kwargs.get("height", 1024) + target_width = width + target_height = height + pid = os.getpid() + + if kwargs.get("prompt_type", "") == "negative": + width = float(width) * patch_settings[pid].negative_adm_scale + height = float(height) * patch_settings[pid].negative_adm_scale + elif kwargs.get("prompt_type", "") == "positive": + width = float(width) * patch_settings[pid].positive_adm_scale + height = float(height) * patch_settings[pid].positive_adm_scale + + def embedder(number_list): + h = self.embedder(torch.tensor(number_list, dtype=torch.float32)) + h = torch.flatten(h).unsqueeze(dim=0).repeat(clip_pooled.shape[0], 1) + return h + + width, height = int(width), int(height) + target_width, target_height = round_to_64(target_width), round_to_64(target_height) + + adm_emphasized = embedder([height, width, 0, 0, target_height, target_width]) + adm_consistent = embedder([target_height, target_width, 0, 0, target_height, target_width]) + + clip_pooled = clip_pooled.to(adm_emphasized) + final_adm = torch.cat((clip_pooled, adm_emphasized, clip_pooled, adm_consistent), dim=1) + + return final_adm + + +def patched_KSamplerX0Inpaint_forward(self, x, sigma, uncond, cond, cond_scale, denoise_mask, model_options={}, seed=None): + if inpaint_worker.current_task is not None: + latent_processor = self.inner_model.inner_model.process_latent_in + inpaint_latent = latent_processor(inpaint_worker.current_task.latent).to(x) + inpaint_mask = inpaint_worker.current_task.latent_mask.to(x) + + if getattr(self, 'energy_generator', None) is None: + # avoid bad results by using different seeds. 
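+                # (seed + 1) % MAX_SEED gives this inpaint "energy" noise its own
+                # deterministic stream, distinct from the sampler's main seed.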
+ self.energy_generator = torch.Generator(device='cpu').manual_seed((seed + 1) % constants.MAX_SEED) + + energy_sigma = sigma.reshape([sigma.shape[0]] + [1] * (len(x.shape) - 1)) + current_energy = torch.randn( + x.size(), dtype=x.dtype, generator=self.energy_generator, device="cpu").to(x) * energy_sigma + x = x * inpaint_mask + (inpaint_latent + current_energy) * (1.0 - inpaint_mask) + + out = self.inner_model(x, sigma, + cond=cond, + uncond=uncond, + cond_scale=cond_scale, + model_options=model_options, + seed=seed) + + out = out * inpaint_mask + inpaint_latent * (1.0 - inpaint_mask) + else: + out = self.inner_model(x, sigma, + cond=cond, + uncond=uncond, + cond_scale=cond_scale, + model_options=model_options, + seed=seed) + return out + + +def timed_adm(y, timesteps): + if isinstance(y, torch.Tensor) and int(y.dim()) == 2 and int(y.shape[1]) == 5632: + y_mask = (timesteps > 999.0 * (1.0 - float(patch_settings[os.getpid()].adm_scaler_end))).to(y)[..., None] + y_with_adm = y[..., :2816].clone() + y_without_adm = y[..., 2816:].clone() + return y_with_adm * y_mask + y_without_adm * (1.0 - y_mask) + return y + + +def patched_cldm_forward(self, x, hint, timesteps, context, y=None, **kwargs): + t_emb = ldm_patched.ldm.modules.diffusionmodules.openaimodel.timestep_embedding(timesteps, self.model_channels, repeat_only=False).to(x.dtype) + emb = self.time_embed(t_emb) + pid = os.getpid() + + guided_hint = self.input_hint_block(hint, emb, context) + + y = timed_adm(y, timesteps) + + outs = [] + + hs = [] + if self.num_classes is not None: + assert y.shape[0] == x.shape[0] + emb = emb + self.label_emb(y) + + h = x + for module, zero_conv in zip(self.input_blocks, self.zero_convs): + if guided_hint is not None: + h = module(h, emb, context) + h += guided_hint + guided_hint = None + else: + h = module(h, emb, context) + outs.append(zero_conv(h, emb, context)) + + h = self.middle_block(h, emb, context) + outs.append(self.middle_block_out(h, emb, context)) + + if patch_settings[pid].controlnet_softness > 0: + for i in range(10): + k = 1.0 - float(i) / 9.0 + outs[i] = outs[i] * (1.0 - patch_settings[pid].controlnet_softness * k) + + return outs + + +def patched_unet_forward(self, x, timesteps=None, context=None, y=None, control=None, transformer_options={}, **kwargs): + self.current_step = 1.0 - timesteps.to(x) / 999.0 + patch_settings[os.getpid()].global_diffusion_progress = float(self.current_step.detach().cpu().numpy().tolist()[0]) + + y = timed_adm(y, timesteps) + + transformer_options["original_shape"] = list(x.shape) + transformer_options["transformer_index"] = 0 + transformer_patches = transformer_options.get("patches", {}) + + num_video_frames = kwargs.get("num_video_frames", self.default_num_video_frames) + image_only_indicator = kwargs.get("image_only_indicator", self.default_image_only_indicator) + time_context = kwargs.get("time_context", None) + + assert (y is not None) == ( + self.num_classes is not None + ), "must specify y if and only if the model is class-conditional" + hs = [] + t_emb = ldm_patched.ldm.modules.diffusionmodules.openaimodel.timestep_embedding(timesteps, self.model_channels, repeat_only=False).to(x.dtype) + emb = self.time_embed(t_emb) + + if self.num_classes is not None: + assert y.shape[0] == x.shape[0] + emb = emb + self.label_emb(y) + + h = x + for id, module in enumerate(self.input_blocks): + transformer_options["block"] = ("input", id) + h = forward_timestep_embed(module, h, emb, context, transformer_options, time_context=time_context, 
num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) + h = apply_control(h, control, 'input') + if "input_block_patch" in transformer_patches: + patch = transformer_patches["input_block_patch"] + for p in patch: + h = p(h, transformer_options) + + hs.append(h) + if "input_block_patch_after_skip" in transformer_patches: + patch = transformer_patches["input_block_patch_after_skip"] + for p in patch: + h = p(h, transformer_options) + + transformer_options["block"] = ("middle", 0) + h = forward_timestep_embed(self.middle_block, h, emb, context, transformer_options, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) + h = apply_control(h, control, 'middle') + + for id, module in enumerate(self.output_blocks): + transformer_options["block"] = ("output", id) + hsp = hs.pop() + hsp = apply_control(hsp, control, 'output') + + if "output_block_patch" in transformer_patches: + patch = transformer_patches["output_block_patch"] + for p in patch: + h, hsp = p(h, hsp, transformer_options) + + h = torch.cat([h, hsp], dim=1) + del hsp + if len(hs) > 0: + output_shape = hs[-1].shape + else: + output_shape = None + h = forward_timestep_embed(module, h, emb, context, transformer_options, output_shape, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) + h = h.type(x.dtype) + if self.predict_codebook_ids: + return self.id_predictor(h) + else: + return self.out(h) + + +def patched_load_models_gpu(*args, **kwargs): + execution_start_time = time.perf_counter() + y = ldm_patched.modules.model_management.load_models_gpu_origin(*args, **kwargs) + moving_time = time.perf_counter() - execution_start_time + if moving_time > 0.1: + print(f'[Fooocus Model Management] Moving model(s) has taken {moving_time:.2f} seconds') + return y + + +def build_loaded(module, loader_name): + original_loader_name = loader_name + '_origin' + + if not hasattr(module, original_loader_name): + setattr(module, original_loader_name, getattr(module, loader_name)) + + original_loader = getattr(module, original_loader_name) + + def loader(*args, **kwargs): + result = None + try: + result = original_loader(*args, **kwargs) + except Exception as e: + result = None + exp = str(e) + '\n' + for path in list(args) + list(kwargs.values()): + if isinstance(path, str): + if os.path.exists(path): + exp += f'File corrupted: {path} \n' + corrupted_backup_file = path + '.corrupted' + if os.path.exists(corrupted_backup_file): + os.remove(corrupted_backup_file) + os.replace(path, corrupted_backup_file) + if os.path.exists(path): + os.remove(path) + exp += f'Fooocus has tried to move the corrupted file to {corrupted_backup_file} \n' + exp += f'You may try again now and Fooocus will download models again. 
\n' + raise ValueError(exp) + return result + + setattr(module, loader_name, loader) + return + + +def patch_all(): + if ldm_patched.modules.model_management.directml_enabled: + ldm_patched.modules.model_management.lowvram_available = True + ldm_patched.modules.model_management.OOM_EXCEPTION = Exception + + patch_all_precision() + patch_all_clip() + + if not hasattr(ldm_patched.modules.model_management, 'load_models_gpu_origin'): + ldm_patched.modules.model_management.load_models_gpu_origin = ldm_patched.modules.model_management.load_models_gpu + + ldm_patched.modules.model_management.load_models_gpu = patched_load_models_gpu + ldm_patched.modules.model_patcher.ModelPatcher.calculate_weight = calculate_weight_patched + ldm_patched.controlnet.cldm.ControlNet.forward = patched_cldm_forward + ldm_patched.ldm.modules.diffusionmodules.openaimodel.UNetModel.forward = patched_unet_forward + ldm_patched.modules.model_base.SDXL.encode_adm = sdxl_encode_adm_patched + ldm_patched.modules.samplers.KSamplerX0Inpaint.forward = patched_KSamplerX0Inpaint_forward + ldm_patched.k_diffusion.sampling.BrownianTreeNoiseSampler = BrownianTreeNoiseSamplerPatched + ldm_patched.modules.samplers.sampling_function = patched_sampling_function + + warnings.filterwarnings(action='ignore', module='torchsde') + + build_loaded(safetensors.torch, 'load_file') + build_loaded(torch, 'load') + + return diff --git a/modules/patch_clip.py b/modules/patch_clip.py new file mode 100644 index 0000000000000000000000000000000000000000..06b7f01bb857b01995ff7b0326813f98f92ea76d --- /dev/null +++ b/modules/patch_clip.py @@ -0,0 +1,195 @@ +# Consistent with Kohya/A1111 to reduce differences between model training and inference. + +import os +import torch +import ldm_patched.controlnet.cldm +import ldm_patched.k_diffusion.sampling +import ldm_patched.ldm.modules.attention +import ldm_patched.ldm.modules.diffusionmodules.model +import ldm_patched.ldm.modules.diffusionmodules.openaimodel +import ldm_patched.ldm.modules.diffusionmodules.openaimodel +import ldm_patched.modules.args_parser +import ldm_patched.modules.model_base +import ldm_patched.modules.model_management +import ldm_patched.modules.model_patcher +import ldm_patched.modules.samplers +import ldm_patched.modules.sd +import ldm_patched.modules.sd1_clip +import ldm_patched.modules.clip_vision +import ldm_patched.modules.ops as ops + +from modules.ops import use_patched_ops +from transformers import CLIPTextModel, CLIPTextConfig, modeling_utils, CLIPVisionConfig, CLIPVisionModelWithProjection + + +def patched_encode_token_weights(self, token_weight_pairs): + to_encode = list() + max_token_len = 0 + has_weights = False + for x in token_weight_pairs: + tokens = list(map(lambda a: a[0], x)) + max_token_len = max(len(tokens), max_token_len) + has_weights = has_weights or not all(map(lambda a: a[1] == 1.0, x)) + to_encode.append(tokens) + + sections = len(to_encode) + if has_weights or sections == 0: + to_encode.append(ldm_patched.modules.sd1_clip.gen_empty_tokens(self.special_tokens, max_token_len)) + + out, pooled = self.encode(to_encode) + if pooled is not None: + first_pooled = pooled[0:1].to(ldm_patched.modules.model_management.intermediate_device()) + else: + first_pooled = pooled + + output = [] + for k in range(0, sections): + z = out[k:k + 1] + if has_weights: + original_mean = z.mean() + z_empty = out[-1] + for i in range(len(z)): + for j in range(len(z[i])): + weight = token_weight_pairs[k][j][1] + if weight != 1.0: + z[i][j] = (z[i][j] - z_empty[j]) * weight + z_empty[j] + 
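# Token weighting shifts the embedding's overall mean; the rescale below restores the pre-weighting mean (consistent with A1111-style prompt weighting). +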
new_mean = z.mean() + z = z * (original_mean / new_mean) + output.append(z) + + if len(output) == 0: + return out[-1:].to(ldm_patched.modules.model_management.intermediate_device()), first_pooled + return torch.cat(output, dim=-2).to(ldm_patched.modules.model_management.intermediate_device()), first_pooled + + +def patched_SDClipModel__init__(self, max_length=77, freeze=True, layer="last", layer_idx=None, + textmodel_json_config=None, dtype=None, special_tokens=None, + layer_norm_hidden_state=True, **kwargs): + torch.nn.Module.__init__(self) + assert layer in self.LAYERS + + if special_tokens is None: + special_tokens = {"start": 49406, "end": 49407, "pad": 49407} + + if textmodel_json_config is None: + textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(ldm_patched.modules.sd1_clip.__file__)), + "sd1_clip_config.json") + + config = CLIPTextConfig.from_json_file(textmodel_json_config) + self.num_layers = config.num_hidden_layers + + with use_patched_ops(ops.manual_cast): + with modeling_utils.no_init_weights(): + self.transformer = CLIPTextModel(config) + + if dtype is not None: + self.transformer.to(dtype) + + self.transformer.text_model.embeddings.to(torch.float32) + + if freeze: + self.freeze() + + self.max_length = max_length + self.layer = layer + self.layer_idx = None + self.special_tokens = special_tokens + self.text_projection = torch.nn.Parameter(torch.eye(self.transformer.get_input_embeddings().weight.shape[1])) + self.logit_scale = torch.nn.Parameter(torch.tensor(4.6055)) + self.enable_attention_masks = False + + self.layer_norm_hidden_state = layer_norm_hidden_state + if layer == "hidden": + assert layer_idx is not None + assert abs(layer_idx) < self.num_layers + self.clip_layer(layer_idx) + self.layer_default = (self.layer, self.layer_idx) + + +def patched_SDClipModel_forward(self, tokens): + backup_embeds = self.transformer.get_input_embeddings() + device = backup_embeds.weight.device + tokens = self.set_up_textual_embeddings(tokens, backup_embeds) + tokens = torch.LongTensor(tokens).to(device) + + attention_mask = None + if self.enable_attention_masks: + attention_mask = torch.zeros_like(tokens) + max_token = self.transformer.get_input_embeddings().weight.shape[0] - 1 + for x in range(attention_mask.shape[0]): + for y in range(attention_mask.shape[1]): + attention_mask[x, y] = 1 + if tokens[x, y] == max_token: + break + + outputs = self.transformer(input_ids=tokens, attention_mask=attention_mask, + output_hidden_states=self.layer == "hidden") + self.transformer.set_input_embeddings(backup_embeds) + + if self.layer == "last": + z = outputs.last_hidden_state + elif self.layer == "pooled": + z = outputs.pooler_output[:, None, :] + else: + z = outputs.hidden_states[self.layer_idx] + if self.layer_norm_hidden_state: + z = self.transformer.text_model.final_layer_norm(z) + + if hasattr(outputs, "pooler_output"): + pooled_output = outputs.pooler_output.float() + else: + pooled_output = None + + if self.text_projection is not None and pooled_output is not None: + pooled_output = pooled_output.float().to(self.text_projection.device) @ self.text_projection.float() + + return z.float(), pooled_output + + +def patched_ClipVisionModel__init__(self, json_config): + config = CLIPVisionConfig.from_json_file(json_config) + + self.load_device = ldm_patched.modules.model_management.text_encoder_device() + self.offload_device = ldm_patched.modules.model_management.text_encoder_offload_device() + + if ldm_patched.modules.model_management.should_use_fp16(self.load_device, 
prioritize_performance=False): + self.dtype = torch.float16 + else: + self.dtype = torch.float32 + + with use_patched_ops(ops.manual_cast): + with modeling_utils.no_init_weights(): + self.model = CLIPVisionModelWithProjection(config) + + self.model.to(self.dtype) + self.patcher = ldm_patched.modules.model_patcher.ModelPatcher( + self.model, + load_device=self.load_device, + offload_device=self.offload_device + ) + + +def patched_ClipVisionModel_encode_image(self, image): + ldm_patched.modules.model_management.load_model_gpu(self.patcher) + pixel_values = ldm_patched.modules.clip_vision.clip_preprocess(image.to(self.load_device)) + outputs = self.model(pixel_values=pixel_values, output_hidden_states=True) + + for k in outputs: + t = outputs[k] + if t is not None: + if k == 'hidden_states': + outputs["penultimate_hidden_states"] = t[-2].to(ldm_patched.modules.model_management.intermediate_device()) + outputs["hidden_states"] = None + else: + outputs[k] = t.to(ldm_patched.modules.model_management.intermediate_device()) + + return outputs + + +def patch_all_clip(): + ldm_patched.modules.sd1_clip.ClipTokenWeightEncoder.encode_token_weights = patched_encode_token_weights + ldm_patched.modules.sd1_clip.SDClipModel.__init__ = patched_SDClipModel__init__ + ldm_patched.modules.sd1_clip.SDClipModel.forward = patched_SDClipModel_forward + ldm_patched.modules.clip_vision.ClipVisionModel.__init__ = patched_ClipVisionModel__init__ + ldm_patched.modules.clip_vision.ClipVisionModel.encode_image = patched_ClipVisionModel_encode_image + return diff --git a/modules/patch_precision.py b/modules/patch_precision.py new file mode 100644 index 0000000000000000000000000000000000000000..83569bdd15f5ab0cac2c57353626c4e843bd264d --- /dev/null +++ b/modules/patch_precision.py @@ -0,0 +1,60 @@ +# Consistent with Kohya to reduce differences between model training and inference. + +import torch +import math +import einops +import numpy as np + +import ldm_patched.ldm.modules.diffusionmodules.openaimodel +import ldm_patched.modules.model_sampling +import ldm_patched.modules.sd1_clip + +from ldm_patched.ldm.modules.diffusionmodules.util import make_beta_schedule + + +def patched_timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False): + # Consistent with Kohya to reduce differences between model training and inference. + + if not repeat_only: + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half + ).to(device=timesteps.device) + args = timesteps[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) + else: + embedding = einops.repeat(timesteps, 'b -> b d', d=dim) + return embedding + + +def patched_register_schedule(self, given_betas=None, beta_schedule="linear", timesteps=1000, + linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): + # Consistent with Kohya to reduce differences between model training and inference. + + if given_betas is not None: + betas = given_betas + else: + betas = make_beta_schedule( + beta_schedule, + timesteps, + linear_start=linear_start, + linear_end=linear_end, + cosine_s=cosine_s) + + alphas = 1. 
- betas + alphas_cumprod = np.cumprod(alphas, axis=0) + timesteps, = betas.shape + self.num_timesteps = int(timesteps) + self.linear_start = linear_start + self.linear_end = linear_end + sigmas = torch.tensor(((1 - alphas_cumprod) / alphas_cumprod) ** 0.5, dtype=torch.float32) + self.set_sigmas(sigmas) + return + + +def patch_all_precision(): + ldm_patched.ldm.modules.diffusionmodules.openaimodel.timestep_embedding = patched_timestep_embedding + ldm_patched.modules.model_sampling.ModelSamplingDiscrete._register_schedule = patched_register_schedule + return diff --git a/modules/private_logger.py b/modules/private_logger.py new file mode 100644 index 0000000000000000000000000000000000000000..01e570a7d96375a15a81b6f07a678b1f7eda743e --- /dev/null +++ b/modules/private_logger.py @@ -0,0 +1,130 @@ +import os +import args_manager +import modules.config +import json +import urllib.parse + +from PIL import Image +from PIL.PngImagePlugin import PngInfo +from modules.util import generate_temp_filename +from modules.meta_parser import MetadataParser, get_exif + +log_cache = {} + + +def get_current_html_path(output_format=None): + output_format = output_format if output_format else modules.config.default_output_format + date_string, local_temp_filename, only_name = generate_temp_filename(folder=modules.config.path_outputs, + extension=output_format) + html_name = os.path.join(os.path.dirname(local_temp_filename), 'log.html') + return html_name + + +def log(img, metadata, metadata_parser: MetadataParser | None = None, output_format=None) -> str: + path_outputs = args_manager.args.temp_path if args_manager.args.disable_image_log else modules.config.path_outputs + output_format = output_format if output_format else modules.config.default_output_format + date_string, local_temp_filename, only_name = generate_temp_filename(folder=path_outputs, extension=output_format) + os.makedirs(os.path.dirname(local_temp_filename), exist_ok=True) + + parsed_parameters = metadata_parser.parse_string(metadata.copy()) if metadata_parser is not None else '' + image = Image.fromarray(img) + + if output_format == 'png': + if parsed_parameters != '': + pnginfo = PngInfo() + pnginfo.add_text('parameters', parsed_parameters) + pnginfo.add_text('fooocus_scheme', metadata_parser.get_scheme().value) + else: + pnginfo = None + image.save(local_temp_filename, pnginfo=pnginfo) + elif output_format == 'jpg': + image.save(local_temp_filename, quality=95, optimize=True, progressive=True, exif=get_exif(parsed_parameters, metadata_parser.get_scheme().value) if metadata_parser else Image.Exif()) + elif output_format == 'webp': + image.save(local_temp_filename, quality=95, lossless=False, exif=get_exif(parsed_parameters, metadata_parser.get_scheme().value) if metadata_parser else Image.Exif()) + else: + image.save(local_temp_filename) + + if args_manager.args.disable_image_log: + return local_temp_filename + + html_name = os.path.join(os.path.dirname(local_temp_filename), 'log.html') + + css_styles = ( + "" + ) + + js = ( + """""" + ) + + begin_part = f"Fooocus Log {date_string}{css_styles}{js}

<p>Fooocus Log {date_string} (private)</p>\n<p>Metadata is embedded if enabled in the config or developer debug mode. You can find the information for each image in line Metadata Scheme.</p><!--fooocus-log-split-->\n\n"
+    end_part = f'\n<!--fooocus-log-split--></body></html>'
+
+    middle_part = log_cache.get(html_name, "")
+
+    if middle_part == "":
+        if os.path.exists(html_name):
+            existing_split = open(html_name, 'r', encoding='utf-8').read().split('<!--fooocus-log-split-->')
+            if len(existing_split) == 3:
+                middle_part = existing_split[1]
+            else:
+                middle_part = existing_split[0]
+
+    div_name = only_name.replace('.', '_')
+    item = f"<div id=\"{div_name}\"><hr>\n"
+    item += f"<a href=\"{only_name}\" target=\"_blank\"><img src=\"{only_name}\" loading=\"lazy\"></a><div>{only_name}</div>"
+    item += "<table>"
+    for label, key, value in metadata:
+        value_txt = str(value).replace('\n', ' <br/> ')
+        item += f"<tr><td>{label}</td><td>{value_txt}</td></tr>\n"
+    item += "</table>"
+
+    js_txt = urllib.parse.quote(json.dumps({k: v for _, k, v in metadata}, indent=0), safe='')
+    item += f"<br/><button onclick=\"navigator.clipboard.writeText(decodeURIComponent('{js_txt}'))\">Copy to Clipboard</button>"
+
+    item += "</div>
\n\n" + + middle_part = item + middle_part + + with open(html_name, 'w', encoding='utf-8') as f: + f.write(begin_part + middle_part + end_part) + + print(f'Image generated with private log at: {html_name}') + + log_cache[html_name] = middle_part + + return local_temp_filename diff --git a/modules/sample_hijack.py b/modules/sample_hijack.py new file mode 100644 index 0000000000000000000000000000000000000000..5936a096d9f0afaac0a672f72cee5f84b23496ad --- /dev/null +++ b/modules/sample_hijack.py @@ -0,0 +1,184 @@ +import torch +import ldm_patched.modules.samplers +import ldm_patched.modules.model_management + +from collections import namedtuple +from ldm_patched.contrib.external_custom_sampler import SDTurboScheduler +from ldm_patched.k_diffusion import sampling as k_diffusion_sampling +from ldm_patched.modules.samplers import normal_scheduler, simple_scheduler, ddim_scheduler +from ldm_patched.modules.model_base import SDXLRefiner, SDXL +from ldm_patched.modules.conds import CONDRegular +from ldm_patched.modules.sample import get_additional_models, get_models_from_cond, cleanup_additional_models +from ldm_patched.modules.samplers import resolve_areas_and_cond_masks, wrap_model, calculate_start_end_timesteps, \ + create_cond_with_same_area_if_none, pre_run_control, apply_empty_x_to_equal_area, encode_model_conds + + +current_refiner = None +refiner_switch_step = -1 + + +@torch.no_grad() +@torch.inference_mode() +def clip_separate_inner(c, p, target_model=None, target_clip=None): + if target_model is None or isinstance(target_model, SDXLRefiner): + c = c[..., -1280:].clone() + elif isinstance(target_model, SDXL): + c = c.clone() + else: + p = None + c = c[..., :768].clone() + + final_layer_norm = target_clip.cond_stage_model.clip_l.transformer.text_model.final_layer_norm + + final_layer_norm_origin_device = final_layer_norm.weight.device + final_layer_norm_origin_dtype = final_layer_norm.weight.dtype + + c_origin_device = c.device + c_origin_dtype = c.dtype + + final_layer_norm.to(device='cpu', dtype=torch.float32) + c = c.to(device='cpu', dtype=torch.float32) + + c = torch.chunk(c, int(c.size(1)) // 77, 1) + c = [final_layer_norm(ci) for ci in c] + c = torch.cat(c, dim=1) + + final_layer_norm.to(device=final_layer_norm_origin_device, dtype=final_layer_norm_origin_dtype) + c = c.to(device=c_origin_device, dtype=c_origin_dtype) + return c, p + + +@torch.no_grad() +@torch.inference_mode() +def clip_separate(cond, target_model=None, target_clip=None): + results = [] + + for c, px in cond: + p = px.get('pooled_output', None) + c, p = clip_separate_inner(c, p, target_model=target_model, target_clip=target_clip) + p = {} if p is None else {'pooled_output': p.clone()} + results.append([c, p]) + + return results + + +@torch.no_grad() +@torch.inference_mode() +def clip_separate_after_preparation(cond, target_model=None, target_clip=None): + results = [] + + for x in cond: + p = x.get('pooled_output', None) + c = x['model_conds']['c_crossattn'].cond + + c, p = clip_separate_inner(c, p, target_model=target_model, target_clip=target_clip) + + result = {'model_conds': {'c_crossattn': CONDRegular(c)}} + + if p is not None: + result['pooled_output'] = p.clone() + + results.append(result) + + return results + + +@torch.no_grad() +@torch.inference_mode() +def sample_hacked(model, noise, positive, negative, cfg, device, sampler, sigmas, model_options={}, latent_image=None, denoise_mask=None, callback=None, disable_pbar=False, seed=None): + global current_refiner + + positive = positive[:] + negative = 
negative[:] + + resolve_areas_and_cond_masks(positive, noise.shape[2], noise.shape[3], device) + resolve_areas_and_cond_masks(negative, noise.shape[2], noise.shape[3], device) + + model_wrap = wrap_model(model) + + calculate_start_end_timesteps(model, negative) + calculate_start_end_timesteps(model, positive) + + if latent_image is not None: + latent_image = model.process_latent_in(latent_image) + + if hasattr(model, 'extra_conds'): + positive = encode_model_conds(model.extra_conds, positive, noise, device, "positive", latent_image=latent_image, denoise_mask=denoise_mask) + negative = encode_model_conds(model.extra_conds, negative, noise, device, "negative", latent_image=latent_image, denoise_mask=denoise_mask) + + #make sure each cond area has an opposite one with the same area + for c in positive: + create_cond_with_same_area_if_none(negative, c) + for c in negative: + create_cond_with_same_area_if_none(positive, c) + + # pre_run_control(model, negative + positive) + pre_run_control(model, positive) # negative is not necessary in Fooocus, 0.5s faster. + + apply_empty_x_to_equal_area(list(filter(lambda c: c.get('control_apply_to_uncond', False) == True, positive)), negative, 'control', lambda cond_cnets, x: cond_cnets[x]) + apply_empty_x_to_equal_area(positive, negative, 'gligen', lambda cond_cnets, x: cond_cnets[x]) + + extra_args = {"cond":positive, "uncond":negative, "cond_scale": cfg, "model_options": model_options, "seed":seed} + + if current_refiner is not None and hasattr(current_refiner.model, 'extra_conds'): + positive_refiner = clip_separate_after_preparation(positive, target_model=current_refiner.model) + negative_refiner = clip_separate_after_preparation(negative, target_model=current_refiner.model) + + positive_refiner = encode_model_conds(current_refiner.model.extra_conds, positive_refiner, noise, device, "positive", latent_image=latent_image, denoise_mask=denoise_mask) + negative_refiner = encode_model_conds(current_refiner.model.extra_conds, negative_refiner, noise, device, "negative", latent_image=latent_image, denoise_mask=denoise_mask) + + def refiner_switch(): + cleanup_additional_models(set(get_models_from_cond(positive, "control") + get_models_from_cond(negative, "control"))) + + extra_args["cond"] = positive_refiner + extra_args["uncond"] = negative_refiner + + # clear ip-adapter for refiner + extra_args['model_options'] = {k: {} if k == 'transformer_options' else v for k, v in extra_args['model_options'].items()} + + models, inference_memory = get_additional_models(positive_refiner, negative_refiner, current_refiner.model_dtype()) + ldm_patched.modules.model_management.load_models_gpu( + [current_refiner] + models, + model.memory_required([noise.shape[0] * 2] + list(noise.shape[1:])) + inference_memory) + + model_wrap.inner_model = current_refiner.model + print('Refiner Swapped') + return + + def callback_wrap(step, x0, x, total_steps): + if step == refiner_switch_step and current_refiner is not None: + refiner_switch() + if callback is not None: + # residual_noise_preview = x - x0 + # residual_noise_preview /= residual_noise_preview.std() + # residual_noise_preview *= x0.std() + callback(step, x0, x, total_steps) + + samples = sampler.sample(model_wrap, sigmas, extra_args, callback_wrap, noise, latent_image, denoise_mask, disable_pbar) + return model.process_latent_out(samples.to(torch.float32)) + + +@torch.no_grad() +@torch.inference_mode() +def calculate_sigmas_scheduler_hacked(model, scheduler_name, steps): + if scheduler_name == "karras": + sigmas = 
k_diffusion_sampling.get_sigmas_karras(n=steps, sigma_min=float(model.model_sampling.sigma_min), sigma_max=float(model.model_sampling.sigma_max)) + elif scheduler_name == "exponential": + sigmas = k_diffusion_sampling.get_sigmas_exponential(n=steps, sigma_min=float(model.model_sampling.sigma_min), sigma_max=float(model.model_sampling.sigma_max)) + elif scheduler_name == "normal": + sigmas = normal_scheduler(model, steps) + elif scheduler_name == "simple": + sigmas = simple_scheduler(model, steps) + elif scheduler_name == "ddim_uniform": + sigmas = ddim_scheduler(model, steps) + elif scheduler_name == "sgm_uniform": + sigmas = normal_scheduler(model, steps, sgm=True) + elif scheduler_name == "turbo": + sigmas = SDTurboScheduler().get_sigmas(namedtuple('Patcher', ['model'])(model=model), steps=steps, denoise=1.0)[0] + else: + raise TypeError("error invalid scheduler") + return sigmas + + +ldm_patched.modules.samplers.calculate_sigmas_scheduler = calculate_sigmas_scheduler_hacked +ldm_patched.modules.samplers.sample = sample_hacked diff --git a/modules/sdxl_styles.py b/modules/sdxl_styles.py new file mode 100644 index 0000000000000000000000000000000000000000..2a310024cdd0f96cb20341f811a50146000b586b --- /dev/null +++ b/modules/sdxl_styles.py @@ -0,0 +1,117 @@ +import os +import re +import json +import math + +from modules.util import get_files_from_folder + + +# cannot use modules.config - validators causing circular imports +styles_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../sdxl_styles/')) +wildcards_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../wildcards/')) +wildcards_max_bfs_depth = 64 + + +def normalize_key(k): + k = k.replace('-', ' ') + words = k.split(' ') + words = [w[:1].upper() + w[1:].lower() for w in words] + k = ' '.join(words) + k = k.replace('3d', '3D') + k = k.replace('Sai', 'SAI') + k = k.replace('Mre', 'MRE') + k = k.replace('(s', '(S') + return k + + +styles = {} + +styles_files = get_files_from_folder(styles_path, ['.json']) + +for x in ['sdxl_styles_fooocus.json', + 'sdxl_styles_sai.json', + 'sdxl_styles_mre.json', + 'sdxl_styles_twri.json', + 'sdxl_styles_diva.json', + 'sdxl_styles_marc_k3nt3l.json']: + if x in styles_files: + styles_files.remove(x) + styles_files.append(x) + +for styles_file in styles_files: + try: + with open(os.path.join(styles_path, styles_file), encoding='utf-8') as f: + for entry in json.load(f): + name = normalize_key(entry['name']) + prompt = entry['prompt'] if 'prompt' in entry else '' + negative_prompt = entry['negative_prompt'] if 'negative_prompt' in entry else '' + styles[name] = (prompt, negative_prompt) + except Exception as e: + print(str(e)) + print(f'Failed to load style file {styles_file}') + +style_keys = list(styles.keys()) +fooocus_expansion = "Fooocus V2" +legal_style_names = [fooocus_expansion] + style_keys + + +def apply_style(style, positive): + p, n = styles[style] + return p.replace('{prompt}', positive).splitlines(), n.splitlines() + + +def apply_wildcards(wildcard_text, rng, directory=wildcards_path): + for _ in range(wildcards_max_bfs_depth): + placeholders = re.findall(r'__([\w-]+)__', wildcard_text) + if len(placeholders) == 0: + return wildcard_text + + print(f'[Wildcards] processing: {wildcard_text}') + for placeholder in placeholders: + try: + words = open(os.path.join(directory, f'{placeholder}.txt'), encoding='utf-8').read().splitlines() + words = [x for x in words if x != ''] + assert len(words) > 0 + wildcard_text = wildcard_text.replace(f'__{placeholder}__', 
rng.choice(words), 1) + except: + print(f'[Wildcards] Warning: {placeholder}.txt missing or empty. ' + f'Using "{placeholder}" as a normal word.') + wildcard_text = wildcard_text.replace(f'__{placeholder}__', placeholder) + print(f'[Wildcards] {wildcard_text}') + + print(f'[Wildcards] BFS stack overflow. Current text: {wildcard_text}') + return wildcard_text + +def get_words(arrays, totalMult, index): + if(len(arrays) == 1): + return [arrays[0].split(',')[index]] + else: + words = arrays[0].split(',') + word = words[index % len(words)] + index -= index % len(words) + index /= len(words) + index = math.floor(index) + return [word] + get_words(arrays[1:], math.floor(totalMult/len(words)), index) + + +def apply_arrays(text, index): + arrays = re.findall(r'\[\[(.*?)\]\]', text) + if len(arrays) == 0: + return text + + print(f'[Arrays] processing: {text}') + mult = 1 + for arr in arrays: + words = arr.split(',') + mult *= len(words) + + index %= mult + chosen_words = get_words(arrays, mult, index) + + i = 0 + for arr in arrays: + text = text.replace(f'[[{arr}]]', chosen_words[i], 1) + i = i+1 + + return text + diff --git a/modules/style_sorter.py b/modules/style_sorter.py new file mode 100644 index 0000000000000000000000000000000000000000..49142bc7926e06ee29f5678de1a9acc13dac5b70 --- /dev/null +++ b/modules/style_sorter.py @@ -0,0 +1,59 @@ +import os +import gradio as gr +import modules.localization as localization +import json + + +all_styles = [] + + +def try_load_sorted_styles(style_names, default_selected): + global all_styles + + all_styles = style_names + + try: + if os.path.exists('sorted_styles.json'): + with open('sorted_styles.json', 'rt', encoding='utf-8') as fp: + sorted_styles = [] + for x in json.load(fp): + if x in all_styles: + sorted_styles.append(x) + for x in all_styles: + if x not in sorted_styles: + sorted_styles.append(x) + all_styles = sorted_styles + except Exception as e: + print('Load style sorting failed.') + print(e) + + unselected = [y for y in all_styles if y not in default_selected] + all_styles = default_selected + unselected + + return + + +def sort_styles(selected): + global all_styles + unselected = [y for y in all_styles if y not in selected] + sorted_styles = selected + unselected + try: + with open('sorted_styles.json', 'wt', encoding='utf-8') as fp: + json.dump(sorted_styles, fp, indent=4) + except Exception as e: + print('Write style sorting failed.') + print(e) + all_styles = sorted_styles + return gr.CheckboxGroup.update(choices=sorted_styles) + + +def localization_key(x): + return x + localization.current_translation.get(x, '') + + +def search_styles(selected, query): + unselected = [y for y in all_styles if y not in selected] + matched = [y for y in unselected if query.lower() in localization_key(y).lower()] if len(query.replace(' ', '')) > 0 else [] + unmatched = [y for y in unselected if y not in matched] + sorted_styles = matched + selected + unmatched + return gr.CheckboxGroup.update(choices=sorted_styles) diff --git a/modules/ui_gradio_extensions.py b/modules/ui_gradio_extensions.py new file mode 100644 index 0000000000000000000000000000000000000000..bebf9f8ca7860c700f52ea5d3d3586917f17d34b --- /dev/null +++ b/modules/ui_gradio_extensions.py @@ -0,0 +1,67 @@ +# based on https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/v1.6.0/modules/ui_gradio_extensions.py + +import os +import gradio as gr +import args_manager + +from modules.localization import localization_js + + +GradioTemplateResponseOriginal = gr.routes.templates.TemplateResponse 
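+# Gradio's original TemplateResponse is saved here so that reload_javascript() below can wrap it and inject Fooocus's JS and CSS into every page the UI serves.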
+ +modules_path = os.path.dirname(os.path.realpath(__file__)) +script_path = os.path.dirname(modules_path) + + +def webpath(fn): + if fn.startswith(script_path): + web_path = os.path.relpath(fn, script_path).replace('\\', '/') + else: + web_path = os.path.abspath(fn) + + return f'file={web_path}?{os.path.getmtime(fn)}' + + +def javascript_html(): + script_js_path = webpath('javascript/script.js') + context_menus_js_path = webpath('javascript/contextMenus.js') + localization_js_path = webpath('javascript/localization.js') + zoom_js_path = webpath('javascript/zoom.js') + edit_attention_js_path = webpath('javascript/edit-attention.js') + viewer_js_path = webpath('javascript/viewer.js') + image_viewer_js_path = webpath('javascript/imageviewer.js') + samples_path = webpath(os.path.abspath('./sdxl_styles/samples/fooocus_v2.jpg')) + head = f'\n' + head += f'\n' + head += f'\n' + head += f'\n' + head += f'\n' + head += f'\n' + head += f'\n' + head += f'\n' + head += f'\n' + + if args_manager.args.theme: + head += f'\n' + + return head + + +def css_html(): + style_css_path = webpath('css/style.css') + head = f'' + return head + + +def reload_javascript(): + js = javascript_html() + css = css_html() + + def template_response(*args, **kwargs): + res = GradioTemplateResponseOriginal(*args, **kwargs) + res.body = res.body.replace(b'', f'{js}'.encode("utf8")) + res.body = res.body.replace(b'', f'{css}'.encode("utf8")) + res.init_headers() + return res + + gr.routes.templates.TemplateResponse = template_response diff --git a/modules/upscaler.py b/modules/upscaler.py new file mode 100644 index 0000000000000000000000000000000000000000..974e4f37c8756df56b9e64143bddff1f1378bc83 --- /dev/null +++ b/modules/upscaler.py @@ -0,0 +1,34 @@ +import os +import torch +import modules.core as core + +from ldm_patched.pfn.architecture.RRDB import RRDBNet as ESRGAN +from ldm_patched.contrib.external_upscale_model import ImageUpscaleWithModel +from collections import OrderedDict +from modules.config import path_upscale_models + +model_filename = os.path.join(path_upscale_models, 'fooocus_upscaler_s409985e5.bin') +opImageUpscaleWithModel = ImageUpscaleWithModel() +model = None + + +def perform_upscale(img): + global model + + print(f'Upscaling image with shape {str(img.shape)} ...') + + if model is None: + sd = torch.load(model_filename) + sdo = OrderedDict() + for k, v in sd.items(): + sdo[k.replace('residual_block_', 'RDB')] = v + del sd + model = ESRGAN(sdo) + model.cpu() + model.eval() + + img = core.numpy_to_pytorch(img) + img = opImageUpscaleWithModel.upscale(model, img)[0] + img = core.pytorch_to_numpy(img)[0] + + return img diff --git a/modules/util.py b/modules/util.py new file mode 100644 index 0000000000000000000000000000000000000000..c7923ec8260286d4bcf858a250b0e75c7e51d97d --- /dev/null +++ b/modules/util.py @@ -0,0 +1,362 @@ +import typing + +import numpy as np +import datetime +import random +import math +import os +import cv2 +import json + +from PIL import Image +from hashlib import sha256 + +import modules.sdxl_styles + +LANCZOS = (Image.Resampling.LANCZOS if hasattr(Image, 'Resampling') else Image.LANCZOS) +HASH_SHA256_LENGTH = 10 + +def erode_or_dilate(x, k): + k = int(k) + if k > 0: + return cv2.dilate(x, kernel=np.ones(shape=(3, 3), dtype=np.uint8), iterations=k) + if k < 0: + return cv2.erode(x, kernel=np.ones(shape=(3, 3), dtype=np.uint8), iterations=-k) + return x + + +def resample_image(im, width, height): + im = Image.fromarray(im) + im = im.resize((int(width), int(height)), resample=LANCZOS) + 
return np.array(im) + + +def resize_image(im, width, height, resize_mode=1): + """ + Resizes an image with the specified resize_mode, width, and height. + + Args: + resize_mode: The mode to use when resizing the image. + 0: Resize the image to the specified width and height. + 1: Resize the image to fill the specified width and height, maintaining the aspect ratio, and then center the image within the dimensions, cropping the excess. + 2: Resize the image to fit within the specified width and height, maintaining the aspect ratio, and then center the image within the dimensions, filling empty with data from image. + im: The image to resize. + width: The width to resize the image to. + height: The height to resize the image to. + """ + + im = Image.fromarray(im) + + def resize(im, w, h): + return im.resize((w, h), resample=LANCZOS) + + if resize_mode == 0: + res = resize(im, width, height) + + elif resize_mode == 1: + ratio = width / height + src_ratio = im.width / im.height + + src_w = width if ratio > src_ratio else im.width * height // im.height + src_h = height if ratio <= src_ratio else im.height * width // im.width + + resized = resize(im, src_w, src_h) + res = Image.new("RGB", (width, height)) + res.paste(resized, box=(width // 2 - src_w // 2, height // 2 - src_h // 2)) + + else: + ratio = width / height + src_ratio = im.width / im.height + + src_w = width if ratio < src_ratio else im.width * height // im.height + src_h = height if ratio >= src_ratio else im.height * width // im.width + + resized = resize(im, src_w, src_h) + res = Image.new("RGB", (width, height)) + res.paste(resized, box=(width // 2 - src_w // 2, height // 2 - src_h // 2)) + + if ratio < src_ratio: + fill_height = height // 2 - src_h // 2 + if fill_height > 0: + res.paste(resized.resize((width, fill_height), box=(0, 0, width, 0)), box=(0, 0)) + res.paste(resized.resize((width, fill_height), box=(0, resized.height, width, resized.height)), box=(0, fill_height + src_h)) + elif ratio > src_ratio: + fill_width = width // 2 - src_w // 2 + if fill_width > 0: + res.paste(resized.resize((fill_width, height), box=(0, 0, 0, height)), box=(0, 0)) + res.paste(resized.resize((fill_width, height), box=(resized.width, 0, resized.width, height)), box=(fill_width + src_w, 0)) + + return np.array(res) + + +def get_shape_ceil(h, w): + return math.ceil(((h * w) ** 0.5) / 64.0) * 64.0 + + +def get_image_shape_ceil(im): + H, W = im.shape[:2] + return get_shape_ceil(H, W) + + +def set_image_shape_ceil(im, shape_ceil): + shape_ceil = float(shape_ceil) + + H_origin, W_origin, _ = im.shape + H, W = H_origin, W_origin + + for _ in range(256): + current_shape_ceil = get_shape_ceil(H, W) + if abs(current_shape_ceil - shape_ceil) < 0.1: + break + k = shape_ceil / current_shape_ceil + H = int(round(float(H) * k / 64.0) * 64) + W = int(round(float(W) * k / 64.0) * 64) + + if H == H_origin and W == W_origin: + return im + + return resample_image(im, width=W, height=H) + + +def HWC3(x): + assert x.dtype == np.uint8 + if x.ndim == 2: + x = x[:, :, None] + assert x.ndim == 3 + H, W, C = x.shape + assert C == 1 or C == 3 or C == 4 + if C == 3: + return x + if C == 1: + return np.concatenate([x, x, x], axis=2) + if C == 4: + color = x[:, :, 0:3].astype(np.float32) + alpha = x[:, :, 3:4].astype(np.float32) / 255.0 + y = color * alpha + 255.0 * (1.0 - alpha) + y = y.clip(0, 255).astype(np.uint8) + return y + + +def remove_empty_str(items, default=None): + items = [x for x in items if x != ""] + if len(items) == 0 and default is not None: + return [default] 
+ return items + + +def join_prompts(*args, **kwargs): + prompts = [str(x) for x in args if str(x) != ""] + if len(prompts) == 0: + return "" + if len(prompts) == 1: + return prompts[0] + return ', '.join(prompts) + + +def generate_temp_filename(folder='./outputs/', extension='png'): + current_time = datetime.datetime.now() + date_string = current_time.strftime("%Y-%m-%d") + time_string = current_time.strftime("%Y-%m-%d_%H-%M-%S") + random_number = random.randint(1000, 9999) + filename = f"{time_string}_{random_number}.{extension}" + result = os.path.join(folder, date_string, filename) + return date_string, os.path.abspath(result), filename + + +def get_files_from_folder(folder_path, exensions=None, name_filter=None): + if not os.path.isdir(folder_path): + raise ValueError("Folder path is not a valid directory.") + + filenames = [] + + for root, dirs, files in os.walk(folder_path, topdown=False): + relative_path = os.path.relpath(root, folder_path) + if relative_path == ".": + relative_path = "" + for filename in sorted(files, key=lambda s: s.casefold()): + _, file_extension = os.path.splitext(filename) + if (exensions is None or file_extension.lower() in exensions) and (name_filter is None or name_filter in _): + path = os.path.join(relative_path, filename) + filenames.append(path) + + return filenames + + +def calculate_sha256(filename, length=HASH_SHA256_LENGTH) -> str: + hash_sha256 = sha256() + blksize = 1024 * 1024 + + with open(filename, "rb") as f: + for chunk in iter(lambda: f.read(blksize), b""): + hash_sha256.update(chunk) + + res = hash_sha256.hexdigest() + return res[:length] if length else res + + +def quote(text): + if ',' not in str(text) and '\n' not in str(text) and ':' not in str(text): + return text + + return json.dumps(text, ensure_ascii=False) + + +def unquote(text): + if len(text) == 0 or text[0] != '"' or text[-1] != '"': + return text + + try: + return json.loads(text) + except Exception: + return text + + +def unwrap_style_text_from_prompt(style_text, prompt): + """ + Checks the prompt to see if the style text is wrapped around it. If so, + returns True plus the prompt text without the style text. Otherwise, returns + False with the original prompt. + + Note that the "cleaned" version of the style text is only used for matching + purposes here. It isn't returned; the original style text is not modified. + """ + stripped_prompt = prompt + stripped_style_text = style_text + if "{prompt}" in stripped_style_text: + # Work out whether the prompt is wrapped in the style text. If so, we + # return True and the "inner" prompt text that isn't part of the style. + try: + left, right = stripped_style_text.split("{prompt}", 2) + except ValueError as e: + # If the style text has multple "{prompt}"s, we can't split it into + # two parts. This is an error, but we can't do anything about it. + print(f"Unable to compare style text to prompt:\n{style_text}") + print(f"Error: {e}") + return False, prompt, '' + + left_pos = stripped_prompt.find(left) + right_pos = stripped_prompt.find(right) + if 0 <= left_pos < right_pos: + real_prompt = stripped_prompt[left_pos + len(left):right_pos] + prompt = stripped_prompt.replace(left + real_prompt + right, '', 1) + if prompt.startswith(", "): + prompt = prompt[2:] + if prompt.endswith(", "): + prompt = prompt[:-2] + return True, prompt, real_prompt + else: + # Work out whether the given prompt starts with the style text. If so, we + # return True and the prompt text up to where the style text starts. 
+ if stripped_prompt.endswith(stripped_style_text): + prompt = stripped_prompt[: len(stripped_prompt) - len(stripped_style_text)] + if prompt.endswith(", "): + prompt = prompt[:-2] + return True, prompt, prompt + + return False, prompt, '' + + +def extract_original_prompts(style, prompt, negative_prompt): + """ + Takes a style and compares it to the prompt and negative prompt. If the style + matches, returns True plus the prompt and negative prompt with the style text + removed. Otherwise, returns False with the original prompt and negative prompt. + """ + if not style.prompt and not style.negative_prompt: + return False, prompt, negative_prompt + + match_positive, extracted_positive, real_prompt = unwrap_style_text_from_prompt( + style.prompt, prompt + ) + if not match_positive: + return False, prompt, negative_prompt, '' + + match_negative, extracted_negative, _ = unwrap_style_text_from_prompt( + style.negative_prompt, negative_prompt + ) + if not match_negative: + return False, prompt, negative_prompt, '' + + return True, extracted_positive, extracted_negative, real_prompt + + +def extract_styles_from_prompt(prompt, negative_prompt): + extracted = [] + applicable_styles = [] + + for style_name, (style_prompt, style_negative_prompt) in modules.sdxl_styles.styles.items(): + applicable_styles.append(PromptStyle(name=style_name, prompt=style_prompt, negative_prompt=style_negative_prompt)) + + real_prompt = '' + + while True: + found_style = None + + for style in applicable_styles: + is_match, new_prompt, new_neg_prompt, new_real_prompt = extract_original_prompts( + style, prompt, negative_prompt + ) + if is_match: + found_style = style + prompt = new_prompt + negative_prompt = new_neg_prompt + if real_prompt == '' and new_real_prompt != '' and new_real_prompt != prompt: + real_prompt = new_real_prompt + break + + if not found_style: + break + + applicable_styles.remove(found_style) + extracted.append(found_style.name) + + # add prompt expansion if not all styles could be resolved + if prompt != '': + if real_prompt != '': + extracted.append(modules.sdxl_styles.fooocus_expansion) + else: + # find real_prompt when only prompt expansion is selected + first_word = prompt.split(', ')[0] + first_word_positions = [i for i in range(len(prompt)) if prompt.startswith(first_word, i)] + if len(first_word_positions) > 1: + real_prompt = prompt[:first_word_positions[-1]] + extracted.append(modules.sdxl_styles.fooocus_expansion) + if real_prompt.endswith(', '): + real_prompt = real_prompt[:-2] + + return list(reversed(extracted)), real_prompt, negative_prompt + + +class PromptStyle(typing.NamedTuple): + name: str + prompt: str + negative_prompt: str + + +def is_json(data: str) -> bool: + try: + loaded_json = json.loads(data) + assert isinstance(loaded_json, dict) + except (ValueError, AssertionError): + return False + return True + + +def get_file_from_folder_list(name, folders): + for folder in folders: + filename = os.path.abspath(os.path.realpath(os.path.join(folder, name))) + if os.path.isfile(filename): + return filename + + return os.path.abspath(os.path.realpath(os.path.join(folders[0], name))) + + +def ordinal_suffix(number: int) -> str: + return 'th' if 10 <= number % 100 <= 20 else {1: 'st', 2: 'nd', 3: 'rd'}.get(number % 10, 'th') + + +def makedirs_with_log(path): + try: + os.makedirs(path, exist_ok=True) + except OSError as error: + print(f'Directory {path} could not be created, reason: {error}') diff --git a/notification-example.ogg b/notification-example.ogg new file mode 100644 index 
0000000000000000000000000000000000000000..fe4291d0682e7401b014c23ace4bfafac23f5137 Binary files /dev/null and b/notification-example.ogg differ diff --git a/presets/anime.json b/presets/anime.json new file mode 100644 index 0000000000000000000000000000000000000000..8bd2813bcb25cdd5ba5cacfa335eeba3d893dd0c --- /dev/null +++ b/presets/anime.json @@ -0,0 +1,46 @@ +{ + "default_model": "animaPencilXL_v100.safetensors", + "default_refiner": "None", + "default_refiner_switch": 0.5, + "default_loras": [ + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ] + ], + "default_cfg_scale": 7.0, + "default_sample_sharpness": 2.0, + "default_sampler": "dpmpp_2m_sde_gpu", + "default_scheduler": "karras", + "default_performance": "Speed", + "default_prompt": "1girl, ", + "default_prompt_negative": "", + "default_styles": [ + "Fooocus V2", + "Fooocus Negative", + "Fooocus Masterpiece" + ], + "default_aspect_ratio": "896*1152", + "checkpoint_downloads": { + "animaPencilXL_v100.safetensors": "https://huggingface.co/lllyasviel/fav_models/resolve/main/fav/animaPencilXL_v100.safetensors" + }, + "embeddings_downloads": {}, + "lora_downloads": {}, + "previous_default_models": [] +} \ No newline at end of file diff --git a/presets/default.json b/presets/default.json new file mode 100644 index 0000000000000000000000000000000000000000..7930c92f0af29d24c9020ae454734692265fabfd --- /dev/null +++ b/presets/default.json @@ -0,0 +1,54 @@ +{ + "default_model": "juggernautXL_v8Rundiffusion.safetensors", + "default_refiner": "None", + "default_refiner_switch": 0.5, + "default_loras": [ + [ + "sd_xl_offset_example-lora_1.0.safetensors", + 0.1 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ] + ], + "default_cfg_scale": 4.0, + "default_sample_sharpness": 2.0, + "default_sampler": "dpmpp_2m_sde_gpu", + "default_scheduler": "karras", + "default_performance": "Speed", + "default_prompt": "", + "default_prompt_negative": "", + "default_styles": [ + "Fooocus V2", + "Fooocus Enhance", + "Fooocus Sharp" + ], + "default_aspect_ratio": "1152*896", + "checkpoint_downloads": { + "juggernautXL_v8Rundiffusion.safetensors": "https://huggingface.co/lllyasviel/fav_models/resolve/main/fav/juggernautXL_v8Rundiffusion.safetensors" + }, + "embeddings_downloads": {}, + "lora_downloads": { + "sd_xl_offset_example-lora_1.0.safetensors": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_offset_example-lora_1.0.safetensors" + }, + "previous_default_models": [ + "juggernautXL_version8Rundiffusion.safetensors", + "juggernautXL_version7Rundiffusion.safetensors", + "juggernautXL_v7Rundiffusion.safetensors", + "juggernautXL_version6Rundiffusion.safetensors", + "juggernautXL_v6Rundiffusion.safetensors" + ] +} \ No newline at end of file diff --git a/presets/lcm.json b/presets/lcm.json new file mode 100644 index 0000000000000000000000000000000000000000..3897f8812a8faedad89d135cc1121632c4f77b7d --- /dev/null +++ b/presets/lcm.json @@ -0,0 +1,52 @@ +{ + "default_model": "juggernautXL_v8Rundiffusion.safetensors", + "default_refiner": "None", + "default_refiner_switch": 0.5, + "default_loras": [ + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ] + ], + "default_cfg_scale": 4.0, + "default_sample_sharpness": 2.0, + "default_sampler": "dpmpp_2m_sde_gpu", + "default_scheduler": "karras", + "default_performance": "Extreme Speed", + 
"default_prompt": "", + "default_prompt_negative": "", + "default_styles": [ + "Fooocus V2", + "Fooocus Enhance", + "Fooocus Sharp" + ], + "default_aspect_ratio": "1152*896", + "checkpoint_downloads": { + "juggernautXL_v8Rundiffusion.safetensors": "https://huggingface.co/lllyasviel/fav_models/resolve/main/fav/juggernautXL_v8Rundiffusion.safetensors" + }, + "embeddings_downloads": {}, + "lora_downloads": {}, + "previous_default_models": [ + "juggernautXL_version8Rundiffusion.safetensors", + "juggernautXL_version7Rundiffusion.safetensors", + "juggernautXL_v7Rundiffusion.safetensors", + "juggernautXL_version6Rundiffusion.safetensors", + "juggernautXL_v6Rundiffusion.safetensors" + ] +} \ No newline at end of file diff --git a/presets/realistic.json b/presets/realistic.json new file mode 100644 index 0000000000000000000000000000000000000000..7799c96a43f18dacac1bccd6907783601b2fef2a --- /dev/null +++ b/presets/realistic.json @@ -0,0 +1,48 @@ +{ + "default_model": "realisticStockPhoto_v20.safetensors", + "default_refiner": "", + "default_refiner_switch": 0.5, + "default_loras": [ + [ + "SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.safetensors", + 0.25 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ] + ], + "default_cfg_scale": 3.0, + "default_sample_sharpness": 2.0, + "default_sampler": "dpmpp_2m_sde_gpu", + "default_scheduler": "karras", + "default_performance": "Speed", + "default_prompt": "", + "default_prompt_negative": "unrealistic, saturated, high contrast, big nose, painting, drawing, sketch, cartoon, anime, manga, render, CG, 3d, watermark, signature, label", + "default_styles": [ + "Fooocus V2", + "Fooocus Photograph", + "Fooocus Negative" + ], + "default_aspect_ratio": "896*1152", + "checkpoint_downloads": { + "realisticStockPhoto_v20.safetensors": "https://huggingface.co/lllyasviel/fav_models/resolve/main/fav/realisticStockPhoto_v20.safetensors" + }, + "embeddings_downloads": {}, + "lora_downloads": { + "SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.safetensors": "https://huggingface.co/lllyasviel/fav_models/resolve/main/fav/SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.safetensors" + }, + "previous_default_models": ["realisticStockPhoto_v10.safetensors"] +} \ No newline at end of file diff --git a/presets/sai.json b/presets/sai.json new file mode 100644 index 0000000000000000000000000000000000000000..fecf047bfcbefa6747cc1908f495322c09ad71bd --- /dev/null +++ b/presets/sai.json @@ -0,0 +1,48 @@ +{ + "default_model": "sd_xl_base_1.0_0.9vae.safetensors", + "default_refiner": "sd_xl_refiner_1.0_0.9vae.safetensors", + "default_refiner_switch": 0.75, + "default_loras": [ + [ + "sd_xl_offset_example-lora_1.0.safetensors", + 0.5 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ], + [ + "None", + 1.0 + ] + ], + "default_cfg_scale": 7.0, + "default_sample_sharpness": 2.0, + "default_sampler": "dpmpp_2m_sde_gpu", + "default_scheduler": "karras", + "default_performance": "Speed", + "default_prompt": "", + "default_prompt_negative": "", + "default_styles": [ + "Fooocus V2", + "Fooocus Cinematic" + ], + "default_aspect_ratio": "1152*896", + "checkpoint_downloads": { + "sd_xl_base_1.0_0.9vae.safetensors": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0_0.9vae.safetensors", + "sd_xl_refiner_1.0_0.9vae.safetensors": "https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0_0.9vae.safetensors" + }, + "embeddings_downloads": {}, + "lora_downloads": { + 
"sd_xl_offset_example-lora_1.0.safetensors": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_offset_example-lora_1.0.safetensors" + }, + "previous_default_models": [] +} \ No newline at end of file diff --git a/readme.md b/readme.md new file mode 100644 index 0000000000000000000000000000000000000000..0bfee5b4cfe2c633cf379d0aaf92fdc75ac07506 --- /dev/null +++ b/readme.md @@ -0,0 +1,438 @@ +
+ + +**Non-cherry-picked** random batch by just typing two words "forest elf", + +without any parameter tweaking, without any strange prompt tags. + +See also **non-cherry-picked** generalization and diversity tests [here](https://github.com/lllyasviel/Fooocus/discussions/2067) and [here](https://github.com/lllyasviel/Fooocus/discussions/808) and [here](https://github.com/lllyasviel/Fooocus/discussions/679) and [here](https://github.com/lllyasviel/Fooocus/discussions/679#realistic). + +In the entire open source community, only Fooocus can achieve this level of **non-cherry-picked** quality. + +
+ + +# Fooocus + +Fooocus is an image generating software (based on [Gradio](https://www.gradio.app/)). + +Fooocus is a rethinking of Stable Diffusion and Midjourney’s designs: + +* Learned from Stable Diffusion, the software is offline, open source, and free. + +* Learned from Midjourney, the manual tweaking is not needed, and users only need to focus on the prompts and images. + +Fooocus has included and automated [lots of inner optimizations and quality improvements](#tech_list). Users can forget all those difficult technical parameters, and just enjoy the interaction between human and computer to "explore new mediums of thought and expanding the imaginative powers of the human species" `[1]`. + +Fooocus has simplified the installation. Between pressing "download" and generating the first image, the number of needed mouse clicks is strictly limited to less than 3. Minimal GPU memory requirement is 4GB (Nvidia). + +`[1]` David Holz, 2019. + +**Recently many fake websites exist on Google when you search “fooocus”. Do not trust those – here is the only official source of Fooocus.** + +## [Installing Fooocus](#download) + +# Moving from Midjourney to Fooocus + +Using Fooocus is as easy as (probably easier than) Midjourney – but this does not mean we lack functionality. Below are the details. + +| Midjourney | Fooocus | +| - | - | +| High-quality text-to-image without needing much prompt engineering or parameter tuning.
(Unknown method) | High-quality text-to-image without needing much prompt engineering or parameter tuning.
(Fooocus has an offline GPT-2 based prompt processing engine and lots of sampling improvements so that results are always beautiful, no matter if your prompt is as short as “house in garden” or as long as 1000 words) | +| V1 V2 V3 V4 | Input Image -> Upscale or Variation -> Vary (Subtle) / Vary (Strong)| +| U1 U2 U3 U4 | Input Image -> Upscale or Variation -> Upscale (1.5x) / Upscale (2x) | +| Inpaint / Up / Down / Left / Right (Pan) | Input Image -> Inpaint or Outpaint -> Inpaint / Up / Down / Left / Right
(Fooocus uses its own inpaint algorithm and inpaint models so that results are more satisfying than in all other software that uses the standard SDXL inpaint method/model) |
+| Image Prompt | Input Image -> Image Prompt
(Fooocus uses its own image prompt algorithm so that result quality and prompt understanding are more satisfying than in all other software that uses standard SDXL methods like IP-Adapters or Revisions) |
+| --style | Advanced -> Style |
+| --stylize | Advanced -> Advanced -> Guidance |
+| --niji | [Multiple launchers: "run.bat", "run_anime.bat", and "run_realistic.bat".](https://github.com/lllyasviel/Fooocus/discussions/679)
Fooocus supports SDXL models on Civitai
(You can search Google for "Civitai" if you do not know about it) |
+| --quality | Advanced -> Quality |
+| --repeat | Advanced -> Image Number |
+| Multi Prompts (::) | Just use multiple lines of prompts |
+| Prompt Weights | You can use "I am (happy:1.5)".
Fooocus uses A1111's reweighting algorithm, so results are better than in ComfyUI when users directly copy prompts from Civitai. (If prompts are written with ComfyUI's reweighting, users are less likely to copy the prompt text, because they prefer dragging files instead.)
To use embedding, you can use "(embedding:file_name:1.1)" | +| --no | Advanced -> Negative Prompt | +| --ar | Advanced -> Aspect Ratios | +| InsightFace | Input Image -> Image Prompt -> Advanced -> FaceSwap | +| Describe | Input Image -> Describe | + +We also have a few things borrowed from the best parts of LeonardoAI: + +| LeonardoAI | Fooocus | +| - | - | +| Prompt Magic | Advanced -> Style -> Fooocus V2 | +| Advanced Sampler Parameters (like Contrast/Sharpness/etc) | Advanced -> Advanced -> Sampling Sharpness / etc | +| User-friendly ControlNets | Input Image -> Image Prompt -> Advanced | + +Fooocus also developed many "fooocus-only" features for advanced users to get perfect results. [Click here to browse the advanced features.](https://github.com/lllyasviel/Fooocus/discussions/117) + +# Download + +### Windows + +You can directly download Fooocus with: + +**[>>> Click here to download <<<](https://github.com/lllyasviel/Fooocus/releases/download/release/Fooocus_win64_2-1-831.7z)** + +After you download the file, please uncompress it and then run the "run.bat". + +![image](https://github.com/lllyasviel/Fooocus/assets/19834515/c49269c4-c274-4893-b368-047c401cc58c) + +The first time you launch the software, it will automatically download models: + +1. It will download [default models](#models) to the folder "Fooocus\models\checkpoints" given different presets. You can download them in advance if you do not want automatic download. +2. Note that if you use inpaint, at the first time you inpaint an image, it will download [Fooocus's own inpaint control model from here](https://huggingface.co/lllyasviel/fooocus_inpaint/resolve/main/inpaint_v26.fooocus.patch) as the file "Fooocus\models\inpaint\inpaint_v26.fooocus.patch" (the size of this file is 1.28GB). + +After Fooocus 2.1.60, you will also have `run_anime.bat` and `run_realistic.bat`. They are different model presets (and require different models, but they will be automatically downloaded). [Check here for more details](https://github.com/lllyasviel/Fooocus/discussions/679). + +![image](https://github.com/lllyasviel/Fooocus/assets/19834515/d386f817-4bd7-490c-ad89-c1e228c23447) + +If you already have these files, you can copy them to the above locations to speed up installation. + +Note that if you see **"MetadataIncompleteBuffer" or "PytorchStreamReader"**, then your model files are corrupted. Please download models again. + +Below is a test on a relatively low-end laptop with **16GB System RAM** and **6GB VRAM** (Nvidia 3060 laptop). The speed on this machine is about 1.35 seconds per iteration. Pretty impressive – nowadays laptops with 3060 are usually at very acceptable price. + +![image](https://github.com/lllyasviel/Fooocus/assets/19834515/938737a5-b105-4f19-b051-81356cb7c495) + +Besides, recently many other software report that Nvidia driver above 532 is sometimes 10x slower than Nvidia driver 531. If your generation time is very long, consider download [Nvidia Driver 531 Laptop](https://www.nvidia.com/download/driverResults.aspx/199991/en-us/) or [Nvidia Driver 531 Desktop](https://www.nvidia.com/download/driverResults.aspx/199990/en-us/). + +Note that the minimal requirement is **4GB Nvidia GPU memory (4GB VRAM)** and **8GB system memory (8GB RAM)**. This requires using Microsoft’s Virtual Swap technique, which is automatically enabled by your Windows installation in most cases, so you often do not need to do anything about it. 
However, if you are not sure, or if you manually turned it off (would anyone really do that?), or **if you see any "RuntimeError: CPUAllocator"**, you can enable it here:
+
+<details>
+<summary>Click here to see the image instructions. </summary>
+
+![image](https://github.com/lllyasviel/Fooocus/assets/19834515/2a06b130-fe9b-4504-94f1-2763be4476e9)
+
+**And make sure that you have at least 40GB free space on each drive if you still see "RuntimeError: CPUAllocator" !**
+
+</details>
+ +Please open an issue if you use similar devices but still cannot achieve acceptable performances. + +Note that the [minimal requirement](#minimal-requirement) for different platforms is different. + +See also the common problems and troubleshoots [here](troubleshoot.md). + +### Colab + +(Last tested - 2023 Dec 12) + +| Colab | Info +| --- | --- | +[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/lllyasviel/Fooocus/blob/main/fooocus_colab.ipynb) | Fooocus Official + +In Colab, you can modify the last line to `!python entry_with_update.py --share` or `!python entry_with_update.py --preset anime --share` or `!python entry_with_update.py --preset realistic --share` for Fooocus Default/Anime/Realistic Edition. + +Note that this Colab will disable refiner by default because Colab free's resources are relatively limited (and some "big" features like image prompt may cause free-tier Colab to disconnect). We make sure that basic text-to-image is always working on free-tier Colab. + +Thanks to [camenduru](https://github.com/camenduru)! + +### Linux (Using Anaconda) + +If you want to use Anaconda/Miniconda, you can + + git clone https://github.com/lllyasviel/Fooocus.git + cd Fooocus + conda env create -f environment.yaml + conda activate fooocus + pip install -r requirements_versions.txt + +Then download the models: download [default models](#models) to the folder "Fooocus\models\checkpoints". **Or let Fooocus automatically download the models** using the launcher: + + conda activate fooocus + python entry_with_update.py + +Or, if you want to open a remote port, use + + conda activate fooocus + python entry_with_update.py --listen + +Use `python entry_with_update.py --preset anime` or `python entry_with_update.py --preset realistic` for Fooocus Anime/Realistic Edition. + +### Linux (Using Python Venv) + +Your Linux needs to have **Python 3.10** installed, and let's say your Python can be called with the command **python3** with your venv system working; you can + + git clone https://github.com/lllyasviel/Fooocus.git + cd Fooocus + python3 -m venv fooocus_env + source fooocus_env/bin/activate + pip install -r requirements_versions.txt + +See the above sections for model downloads. You can launch the software with: + + source fooocus_env/bin/activate + python entry_with_update.py + +Or, if you want to open a remote port, use + + source fooocus_env/bin/activate + python entry_with_update.py --listen + +Use `python entry_with_update.py --preset anime` or `python entry_with_update.py --preset realistic` for Fooocus Anime/Realistic Edition. + +### Linux (Using native system Python) + +If you know what you are doing, and your Linux already has **Python 3.10** installed, and your Python can be called with the command **python3** (and Pip with **pip3**), you can + + git clone https://github.com/lllyasviel/Fooocus.git + cd Fooocus + pip3 install -r requirements_versions.txt + +See the above sections for model downloads. You can launch the software with: + + python3 entry_with_update.py + +Or, if you want to open a remote port, use + + python3 entry_with_update.py --listen + +Use `python entry_with_update.py --preset anime` or `python entry_with_update.py --preset realistic` for Fooocus Anime/Realistic Edition. + +### Linux (AMD GPUs) + +Note that the [minimal requirement](#minimal-requirement) for different platforms is different. + +Same with the above instructions. 
You need to change torch to the AMD version + + pip uninstall torch torchvision torchaudio torchtext functorch xformers + pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/rocm5.6 + +AMD is not intensively tested, however. The AMD support is in beta. + +Use `python entry_with_update.py --preset anime` or `python entry_with_update.py --preset realistic` for Fooocus Anime/Realistic Edition. + +### Windows (AMD GPUs) + +Note that the [minimal requirement](#minimal-requirement) for different platforms is different. + +Same with Windows. Download the software and edit the content of `run.bat` as: + + .\python_embeded\python.exe -m pip uninstall torch torchvision torchaudio torchtext functorch xformers -y + .\python_embeded\python.exe -m pip install torch-directml + .\python_embeded\python.exe -s Fooocus\entry_with_update.py --directml + pause + +Then run the `run.bat`. + +AMD is not intensively tested, however. The AMD support is in beta. + +For AMD, use `.\python_embeded\python.exe entry_with_update.py --directml --preset anime` or `.\python_embeded\python.exe entry_with_update.py --directml --preset realistic` for Fooocus Anime/Realistic Edition. + +### Mac + +Note that the [minimal requirement](#minimal-requirement) for different platforms is different. + +Mac is not intensively tested. Below is an unofficial guideline for using Mac. You can discuss problems [here](https://github.com/lllyasviel/Fooocus/pull/129). + +You can install Fooocus on Apple Mac silicon (M1 or M2) with macOS 'Catalina' or a newer version. Fooocus runs on Apple silicon computers via [PyTorch](https://pytorch.org/get-started/locally/) MPS device acceleration. Mac Silicon computers don't come with a dedicated graphics card, resulting in significantly longer image processing times compared to computers with dedicated graphics cards. + +1. Install the conda package manager and pytorch nightly. Read the [Accelerated PyTorch training on Mac](https://developer.apple.com/metal/pytorch/) Apple Developer guide for instructions. Make sure pytorch recognizes your MPS device. +1. Open the macOS Terminal app and clone this repository with `git clone https://github.com/lllyasviel/Fooocus.git`. +1. Change to the new Fooocus directory, `cd Fooocus`. +1. Create a new conda environment, `conda env create -f environment.yaml`. +1. Activate your new conda environment, `conda activate fooocus`. +1. Install the packages required by Fooocus, `pip install -r requirements_versions.txt`. +1. Launch Fooocus by running `python entry_with_update.py`. (Some Mac M2 users may need `python entry_with_update.py --disable-offload-from-vram` to speed up model loading/unloading.) The first time you run Fooocus, it will automatically download the Stable Diffusion SDXL models and will take a significant amount of time, depending on your internet connection. + +Use `python entry_with_update.py --preset anime` or `python entry_with_update.py --preset realistic` for Fooocus Anime/Realistic Edition. + +### Docker + +See [docker.md](docker.md) + +### Download Previous Version + +See the guidelines [here](https://github.com/lllyasviel/Fooocus/discussions/1405). + +## Minimal Requirement + +Below is the minimal requirement for running Fooocus locally. If your device capability is lower than this spec, you may not be able to use Fooocus locally. (Please let us know, in any case, if your device capability is lower but Fooocus still works.) 
+ +| Operating System | GPU | Minimal GPU Memory | Minimal System Memory | [System Swap](troubleshoot.md) | Note | +|-------------------|------------------------------|------------------------------|---------------------------|--------------------------------|----------------------------------------------------------------------------| +| Windows/Linux | Nvidia RTX 4XXX | 4GB | 8GB | Required | fastest | +| Windows/Linux | Nvidia RTX 3XXX | 4GB | 8GB | Required | usually faster than RTX 2XXX | +| Windows/Linux | Nvidia RTX 2XXX | 4GB | 8GB | Required | usually faster than GTX 1XXX | +| Windows/Linux | Nvidia GTX 1XXX | 8GB (* 6GB uncertain) | 8GB | Required | only marginally faster than CPU | +| Windows/Linux | Nvidia GTX 9XX | 8GB | 8GB | Required | faster or slower than CPU | +| Windows/Linux | Nvidia GTX < 9XX | Not supported | / | / | / | +| Windows | AMD GPU | 8GB (updated 2023 Dec 30) | 8GB | Required | via DirectML (* ROCm is on hold), about 3x slower than Nvidia RTX 3XXX | +| Linux | AMD GPU | 8GB | 8GB | Required | via ROCm, about 1.5x slower than Nvidia RTX 3XXX | +| Mac | M1/M2 MPS | Shared | Shared | Shared | about 9x slower than Nvidia RTX 3XXX | +| Windows/Linux/Mac | only use CPU | 0GB | 32GB | Required | about 17x slower than Nvidia RTX 3XXX | + +* AMD GPU ROCm (on hold): The AMD is still working on supporting ROCm on Windows. + +* Nvidia GTX 1XXX 6GB uncertain: Some people report 6GB success on GTX 10XX, but some other people report failure cases. + +*Note that Fooocus is only for extremely high quality image generating. We will not support smaller models to reduce the requirement and sacrifice result quality.* + +## Troubleshoot + +See the common problems [here](troubleshoot.md). + +## Default Models + + +Given different goals, the default models and configs of Fooocus are different: + +| Task | Windows | Linux args | Main Model | Refiner | Config | +| --- | --- | --- | --- | --- |--------------------------------------------------------------------------------| +| General | run.bat | | juggernautXL_v8Rundiffusion | not used | [here](https://github.com/lllyasviel/Fooocus/blob/main/presets/default.json) | +| Realistic | run_realistic.bat | --preset realistic | realisticStockPhoto_v20 | not used | [here](https://github.com/lllyasviel/Fooocus/blob/main/presets/realistic.json) | +| Anime | run_anime.bat | --preset anime | animaPencilXL_v100 | not used | [here](https://github.com/lllyasviel/Fooocus/blob/main/presets/anime.json) | + +Note that the download is **automatic** - you do not need to do anything if the internet connection is okay. However, you can download them manually if you (or move them from somewhere else) have your own preparation. + +## UI Access and Authentication +In addition to running on localhost, Fooocus can also expose its UI in two ways: +* Local UI listener: use `--listen` (specify port e.g. with `--port 8888`). +* API access: use `--share` (registers an endpoint at `.gradio.live`). + +In both ways the access is unauthenticated by default. You can add basic authentication by creating a file called `auth.json` in the main directory, which contains a list of JSON objects with the keys `user` and `pass` (see example in [auth-example.json](./auth-example.json)). + +## List of "Hidden" Tricks + + +The below things are already inside the software, and **users do not need to do anything about these**. + +1. 
## List of "Hidden" Tricks

The features below are already inside the software, and **users do not need to do anything about them**.

1. GPT2-based [prompt expansion as a dynamic style "Fooocus V2".](https://github.com/lllyasviel/Fooocus/discussions/117#raw) (similar to Midjourney's hidden pre-processing and "raw" mode, or LeonardoAI's Prompt Magic).
2. Native refiner swap inside one single k-sampler. The advantage is that the refiner model can now reuse the base model's momentum (or ODE's history parameters) collected from k-sampling to achieve more coherent sampling. In Automatic1111's high-res fix and ComfyUI's node system, the base model and refiner use two independent k-samplers, which means the momentum is largely wasted and the sampling continuity is broken. Fooocus uses its own advanced k-diffusion sampling that ensures a seamless, native, and continuous swap in a refiner setup. (Update Aug 13: Actually, I discussed this with Automatic1111 several days ago, and it seems that the "native refiner swap inside one single k-sampler" is [merged](https://github.com/AUTOMATIC1111/stable-diffusion-webui/pull/12371) into the dev branch of webui. Great!)
3. Negative ADM guidance. Because the highest resolution level of XL Base does not have cross attentions, the positive and negative signals for XL's highest resolution level cannot receive enough contrast during CFG sampling, causing the results to look a bit plastic or overly smooth in certain cases. Fortunately, since XL's highest resolution level is still conditioned on image aspect ratios (ADM), we can modify the adm on the positive/negative side to compensate for the lack of CFG contrast in the highest resolution level. (Update Aug 16: the iOS app [Draw Things](https://apps.apple.com/us/app/draw-things-ai-generation/id6444050820) will support Negative ADM Guidance. Great!)
4. We implemented a carefully tuned variation of Section 5.1 of ["Improving Sample Quality of Diffusion Models Using Self-Attention Guidance"](https://arxiv.org/pdf/2210.00939.pdf). The weight is set very low, but this is Fooocus's final guarantee that XL will never yield an overly smooth or plastic appearance (examples [here](https://github.com/lllyasviel/Fooocus/discussions/117#sharpness)). This can almost eliminate all cases in which XL still occasionally produces overly smooth results, even with negative ADM guidance. (Update 2023 Aug 18: the Gaussian kernel of SAG is changed to an anisotropic kernel for better structure preservation and fewer artifacts.)
5. We modified the style templates a bit and added the "cinematic-default".
6. We tested the "sd_xl_offset_example-lora_1.0.safetensors", and it seems that when the lora weight is below 0.5, the results are always better than XL without the lora.
7. The parameters of the samplers are carefully tuned.
8. Because XL uses positional encoding for generation resolution, images generated at several fixed resolutions look a bit better than those from arbitrary resolutions (because the positional encoding is not very good at handling integer numbers that are unseen during training). This suggests that the resolutions in the UI may be hard-coded for best results.
9. Separated prompts for the two different text encoders seem unnecessary. Separated prompts for the base model and refiner may work, but the effects are random, and we refrain from implementing this.
10. The DPM family seems well-suited for XL, since XL sometimes generates overly smooth texture while the DPM family sometimes generates overly dense detail in texture. Their joint effect looks neutral and appealing to human perception.
11. A carefully designed system for balancing multiple styles as well as prompt expansion.
12. Using automatic1111's method to normalize prompt emphasizing. This significantly improves results when users directly copy prompts from civitai.
13. The joint swap system of the refiner now also supports img2img and upscale in a seamless way.
14. CFG Scale and TSNR correction (tuned for SDXL) when CFG is greater than 10.

## Customization

After the first time you run Fooocus, a config file will be generated at `Fooocus\config.txt`. This file can be edited to change the model paths or the default parameters.

For example, an edited `Fooocus\config.txt` (this file will be generated after the first launch) may look like this:

```json
{
    "path_checkpoints": "D:\\Fooocus\\models\\checkpoints",
    "path_loras": "D:\\Fooocus\\models\\loras",
    "path_embeddings": "D:\\Fooocus\\models\\embeddings",
    "path_vae_approx": "D:\\Fooocus\\models\\vae_approx",
    "path_upscale_models": "D:\\Fooocus\\models\\upscale_models",
    "path_inpaint": "D:\\Fooocus\\models\\inpaint",
    "path_controlnet": "D:\\Fooocus\\models\\controlnet",
    "path_clip_vision": "D:\\Fooocus\\models\\clip_vision",
    "path_fooocus_expansion": "D:\\Fooocus\\models\\prompt_expansion\\fooocus_expansion",
    "path_outputs": "D:\\Fooocus\\outputs",
    "default_model": "realisticStockPhoto_v10.safetensors",
    "default_refiner": "",
    "default_loras": [["lora_filename_1.safetensors", 0.5], ["lora_filename_2.safetensors", 0.5]],
    "default_cfg_scale": 3.0,
    "default_sampler": "dpmpp_2m",
    "default_scheduler": "karras",
    "default_negative_prompt": "low quality",
    "default_positive_prompt": "",
    "default_styles": [
        "Fooocus V2",
        "Fooocus Photograph",
        "Fooocus Negative"
    ]
}
```

Many other keys, formats, and examples are in `Fooocus\config_modification_tutorial.txt` (this file will be generated after the first launch).

Think twice before you really change the config. If you find yourself breaking things, just delete `Fooocus\config.txt` - Fooocus will go back to the defaults.

A safer way is just to try "run_anime.bat" or "run_realistic.bat" - they should already be good enough for different tasks.

~Note that `user_path_config.txt` is deprecated and will be removed soon.~ (Edit: it is already removed.)
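Since `config.txt` is plain JSON, you can also edit it programmatically. Below is a minimal, illustrative Python sketch (not part of Fooocus itself) that changes one default value; the key names follow the example above, and the script assumes it is run from the Fooocus folder:

```python
import json

# config.txt is generated in the Fooocus folder after the first launch;
# despite the .txt extension, its content is JSON.
config_path = "config.txt"

with open(config_path, "r", encoding="utf-8") as f:
    config = json.load(f)

# Change a default; any key from the example above can be edited the same way.
config["default_cfg_scale"] = 3.0

with open(config_path, "w", encoding="utf-8") as f:
    json.dump(config, f, indent=4)
```

If the edited file ever becomes invalid JSON, deleting it restores the defaults as described above.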
### All CMD Flags

```
entry_with_update.py [-h] [--listen [IP]] [--port PORT]
                     [--disable-header-check [ORIGIN]]
                     [--web-upload-size WEB_UPLOAD_SIZE]
                     [--external-working-path PATH [PATH ...]]
                     [--output-path OUTPUT_PATH] [--temp-path TEMP_PATH]
                     [--cache-path CACHE_PATH] [--in-browser]
                     [--disable-in-browser] [--gpu-device-id DEVICE_ID]
                     [--async-cuda-allocation | --disable-async-cuda-allocation]
                     [--disable-attention-upcast] [--all-in-fp32 | --all-in-fp16]
                     [--unet-in-bf16 | --unet-in-fp16 | --unet-in-fp8-e4m3fn | --unet-in-fp8-e5m2]
                     [--vae-in-fp16 | --vae-in-fp32 | --vae-in-bf16]
                     [--clip-in-fp8-e4m3fn | --clip-in-fp8-e5m2 | --clip-in-fp16 | --clip-in-fp32]
                     [--directml [DIRECTML_DEVICE]] [--disable-ipex-hijack]
                     [--preview-option [none,auto,fast,taesd]]
                     [--attention-split | --attention-quad | --attention-pytorch]
                     [--disable-xformers]
                     [--always-gpu | --always-high-vram | --always-normal-vram |
                      --always-low-vram | --always-no-vram | --always-cpu [CPU_NUM_THREADS]]
                     [--always-offload-from-vram] [--disable-server-log]
                     [--debug-mode] [--is-windows-embedded-python]
                     [--disable-server-info] [--share] [--preset PRESET]
                     [--language LANGUAGE] [--disable-offload-from-vram]
                     [--theme THEME] [--disable-image-log]
```
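For example, a launch that exposes the UI on the local network, loads the realistic preset, and disables the image log might look like this (the flag values are illustrative):

    python entry_with_update.py --listen --port 8888 --preset realistic --disable-image-log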
## Advanced Features

[Click here to browse the advanced features.](https://github.com/lllyasviel/Fooocus/discussions/117)

Fooocus also has many community forks, just like SD-WebUI's [vladmandic/automatic](https://github.com/vladmandic/automatic) and [anapnoe/stable-diffusion-webui-ux](https://github.com/anapnoe/stable-diffusion-webui-ux), for enthusiastic users who want to try them!

| Fooocus' forks |
| - |
| [fenneishi/Fooocus-Control](https://github.com/fenneishi/Fooocus-Control)<br />[runew0lf/RuinedFooocus](https://github.com/runew0lf/RuinedFooocus)<br />[MoonRide303/Fooocus-MRE](https://github.com/MoonRide303/Fooocus-MRE)<br />[metercai/SimpleSDXL](https://github.com/metercai/SimpleSDXL)<br />and so on ... |
See also [About Forking and Promotion of Forks](https://github.com/lllyasviel/Fooocus/discussions/699).

## Thanks

Special thanks to [twri](https://github.com/twri), [3Diva](https://github.com/3Diva), and [Marc K3nt3L](https://github.com/K3nt3L) for creating the additional SDXL styles available in Fooocus. Thanks also to [daswer123](https://github.com/daswer123) for contributing the Canvas Zoom!

## Update Log

The log is [here](update_log.md).

## Localization/Translation/I18N

**We need your help!** Please help translate Fooocus into international languages.

You can put JSON files in the `language` folder to translate the user interface.

For example, below is the content of `Fooocus/language/example.json`:

```json
{
  "Generate": "生成",
  "Input Image": "入力画像",
  "Advanced": "고급",
  "SAI 3D Model": "SAI 3D Modèle"
}
```

If you add the `--language example` argument, Fooocus will read `Fooocus/language/example.json` to translate the UI.

For example, you can edit the ending line of the Windows `run.bat` as

    .\python_embeded\python.exe -s Fooocus\entry_with_update.py --language example

Or `run_anime.bat` as

    .\python_embeded\python.exe -s Fooocus\entry_with_update.py --language example --preset anime

Or `run_realistic.bat` as

    .\python_embeded\python.exe -s Fooocus\entry_with_update.py --language example --preset realistic

For practical translation, you can create your own file like `Fooocus/language/jp.json` or `Fooocus/language/cn.json` and then use the flag `--language jp` or `--language cn`. These files do not exist yet. **We need your help to create them!**

Note that if no `--language` is given and `Fooocus/language/default.json` exists, Fooocus will always load it for translation. By default, the file `Fooocus/language/default.json` does not exist.
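Since a translation file is just a flat JSON object mapping original UI strings to translated ones, a quick sanity check before launching can save a failed start. Below is a small, illustrative Python sketch (not part of Fooocus itself) that validates such a file:

```python
import json
import sys

def check_language_file(path):
    """Verify that a Fooocus translation file is a flat string-to-string JSON object."""
    with open(path, "r", encoding="utf-8") as f:
        data = json.load(f)  # raises json.JSONDecodeError on malformed JSON
    if not isinstance(data, dict):
        raise TypeError(f"{path}: expected a JSON object at the top level")
    for key, value in data.items():
        if not isinstance(key, str) or not isinstance(value, str):
            raise TypeError(f"{path}: entry {key!r} is not a string-to-string pair")
    print(f"{path}: OK, {len(data)} translated strings")

if __name__ == "__main__":
    # Defaults to the example file shown above; pass another path as the first argument.
    check_language_file(sys.argv[1] if len(sys.argv) > 1 else "language/example.json")
```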
diff --git a/requirements_docker.txt b/requirements_docker.txt new file mode 100644 index 0000000000000000000000000000000000000000..3cf4aa89d37e48eabd1085a2a54f293adcfad3fb --- /dev/null +++ b/requirements_docker.txt @@ -0,0 +1,5 @@ +torch==2.0.1 +torchvision==0.15.2 +torchaudio==2.0.2 +torchtext==0.15.2 +torchdata==0.6.1 diff --git a/requirements_versions.txt b/requirements_versions.txt new file mode 100644 index 0000000000000000000000000000000000000000..b2111c1f5d80a8647743713f6a9013a12770c4fc --- /dev/null +++ b/requirements_versions.txt @@ -0,0 +1,18 @@ +torchsde==0.2.5 +einops==0.4.1 +transformers==4.30.2 +safetensors==0.3.1 +accelerate==0.21.0 +pyyaml==6.0 +Pillow==9.2.0 +scipy==1.9.3 +tqdm==4.64.1 +psutil==5.9.5 +pytorch_lightning==1.9.4 +omegaconf==2.2.3 +gradio==3.41.2 +pygit2==1.12.2 +opencv-contrib-python==4.8.0.74 +httpx==0.24.1 +onnxruntime==1.16.3 +timm==0.9.2 diff --git a/sdxl_styles/samples/abstract_expressionism.jpg b/sdxl_styles/samples/abstract_expressionism.jpg new file mode 100644 index 0000000000000000000000000000000000000000..226b8fa7b02d885be6b054fb545c69157c7555fa Binary files /dev/null and b/sdxl_styles/samples/abstract_expressionism.jpg differ diff --git a/sdxl_styles/samples/academia.jpg b/sdxl_styles/samples/academia.jpg new file mode 100644 index 0000000000000000000000000000000000000000..26a700d000bb2a2de08c2ec637fa763f9dd52673 Binary files /dev/null and b/sdxl_styles/samples/academia.jpg differ diff --git a/sdxl_styles/samples/action_figure.jpg b/sdxl_styles/samples/action_figure.jpg new file mode 100644 index 0000000000000000000000000000000000000000..fcd1c092d218f6d1299f4624427f902cbfb11e5f Binary files /dev/null and b/sdxl_styles/samples/action_figure.jpg differ diff --git a/sdxl_styles/samples/adorable_3d_character.jpg b/sdxl_styles/samples/adorable_3d_character.jpg new file mode 100644 index 0000000000000000000000000000000000000000..493bfb8f5b9cbef2e145e6abda6779bd6b4ebef8 Binary files /dev/null and b/sdxl_styles/samples/adorable_3d_character.jpg differ diff --git a/sdxl_styles/samples/adorable_kawaii.jpg b/sdxl_styles/samples/adorable_kawaii.jpg new file mode 100644 index 0000000000000000000000000000000000000000..52bc77331fec6475a957139275e52356976c654b Binary files /dev/null and b/sdxl_styles/samples/adorable_kawaii.jpg differ diff --git a/sdxl_styles/samples/ads_advertising.jpg b/sdxl_styles/samples/ads_advertising.jpg new file mode 100644 index 0000000000000000000000000000000000000000..40631d4c1a9c9d31c241c0087924d62a687d5a00 Binary files /dev/null and b/sdxl_styles/samples/ads_advertising.jpg differ diff --git a/sdxl_styles/samples/ads_automotive.jpg b/sdxl_styles/samples/ads_automotive.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ceea6c4d2504dc49430e7f746b4b05ea7d63d8c9 Binary files /dev/null and b/sdxl_styles/samples/ads_automotive.jpg differ diff --git a/sdxl_styles/samples/ads_corporate.jpg b/sdxl_styles/samples/ads_corporate.jpg new file mode 100644 index 0000000000000000000000000000000000000000..1d590743b5bb538904940b18fd89f2c5b2a66bc5 Binary files /dev/null and b/sdxl_styles/samples/ads_corporate.jpg differ diff --git a/sdxl_styles/samples/ads_fashion_editorial.jpg b/sdxl_styles/samples/ads_fashion_editorial.jpg new file mode 100644 index 0000000000000000000000000000000000000000..22fdd23241fb096a7a7f5a7310c5cc5dc48cd4ba Binary files /dev/null and b/sdxl_styles/samples/ads_fashion_editorial.jpg differ diff --git a/sdxl_styles/samples/ads_food_photography.jpg b/sdxl_styles/samples/ads_food_photography.jpg new file 
mode 100644 index 0000000000000000000000000000000000000000..64c38affdd77c5a44bc4adab775a6fdc15ce15eb Binary files /dev/null and b/sdxl_styles/samples/ads_food_photography.jpg differ diff --git a/sdxl_styles/samples/ads_gourmet_food_photography.jpg b/sdxl_styles/samples/ads_gourmet_food_photography.jpg new file mode 100644 index 0000000000000000000000000000000000000000..305770b0fdb973d3ebe643e5c40cff9c6f7fcdea Binary files /dev/null and b/sdxl_styles/samples/ads_gourmet_food_photography.jpg differ diff --git a/sdxl_styles/samples/ads_luxury.jpg b/sdxl_styles/samples/ads_luxury.jpg new file mode 100644 index 0000000000000000000000000000000000000000..542485689346d7bb75e64b31d679f0e4b6421c9e Binary files /dev/null and b/sdxl_styles/samples/ads_luxury.jpg differ diff --git a/sdxl_styles/samples/ads_real_estate.jpg b/sdxl_styles/samples/ads_real_estate.jpg new file mode 100644 index 0000000000000000000000000000000000000000..438b9fd189325065c9bfbfdfbec191b3213e6702 Binary files /dev/null and b/sdxl_styles/samples/ads_real_estate.jpg differ diff --git a/sdxl_styles/samples/ads_retail.jpg b/sdxl_styles/samples/ads_retail.jpg new file mode 100644 index 0000000000000000000000000000000000000000..93aea1e752d4c4d52b59283e4354d3e18d65b39f Binary files /dev/null and b/sdxl_styles/samples/ads_retail.jpg differ diff --git a/sdxl_styles/samples/art_deco.jpg b/sdxl_styles/samples/art_deco.jpg new file mode 100644 index 0000000000000000000000000000000000000000..7a37c72287516cfdef08803cdc5f73055885f69e Binary files /dev/null and b/sdxl_styles/samples/art_deco.jpg differ diff --git a/sdxl_styles/samples/art_nouveau.jpg b/sdxl_styles/samples/art_nouveau.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e318db83b14e5d558ccfa7c88bca866cc2cb2cf2 Binary files /dev/null and b/sdxl_styles/samples/art_nouveau.jpg differ diff --git a/sdxl_styles/samples/artstyle_abstract.jpg b/sdxl_styles/samples/artstyle_abstract.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d1c3223b6a26df1ee2a12c371b4ef429509f587a Binary files /dev/null and b/sdxl_styles/samples/artstyle_abstract.jpg differ diff --git a/sdxl_styles/samples/artstyle_abstract_expressionism.jpg b/sdxl_styles/samples/artstyle_abstract_expressionism.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c2a9db026a06dbbf0d7fe52278eb6b54c3266c36 Binary files /dev/null and b/sdxl_styles/samples/artstyle_abstract_expressionism.jpg differ diff --git a/sdxl_styles/samples/artstyle_art_deco.jpg b/sdxl_styles/samples/artstyle_art_deco.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d466541edc103cfdd475a16e41c1144dc9c030a9 Binary files /dev/null and b/sdxl_styles/samples/artstyle_art_deco.jpg differ diff --git a/sdxl_styles/samples/artstyle_art_nouveau.jpg b/sdxl_styles/samples/artstyle_art_nouveau.jpg new file mode 100644 index 0000000000000000000000000000000000000000..1f34ae95eb5332239c6d201cf59051ec9ba0c564 Binary files /dev/null and b/sdxl_styles/samples/artstyle_art_nouveau.jpg differ diff --git a/sdxl_styles/samples/artstyle_constructivist.jpg b/sdxl_styles/samples/artstyle_constructivist.jpg new file mode 100644 index 0000000000000000000000000000000000000000..161161a5bfd5e4b1be43de533294931be1b1b34a Binary files /dev/null and b/sdxl_styles/samples/artstyle_constructivist.jpg differ diff --git a/sdxl_styles/samples/artstyle_cubist.jpg b/sdxl_styles/samples/artstyle_cubist.jpg new file mode 100644 index 0000000000000000000000000000000000000000..016cce7d66eb64f48bf15a0d6ab563fa8195a027 
Binary files /dev/null and b/sdxl_styles/samples/artstyle_cubist.jpg differ diff --git a/sdxl_styles/samples/artstyle_expressionist.jpg b/sdxl_styles/samples/artstyle_expressionist.jpg new file mode 100644 index 0000000000000000000000000000000000000000..40eec1dbcb78a30321f5204bd2ae2f12ece14edf Binary files /dev/null and b/sdxl_styles/samples/artstyle_expressionist.jpg differ diff --git a/sdxl_styles/samples/artstyle_graffiti.jpg b/sdxl_styles/samples/artstyle_graffiti.jpg new file mode 100644 index 0000000000000000000000000000000000000000..12c6c5fa113b7df7dfa9d076670541032d497708 Binary files /dev/null and b/sdxl_styles/samples/artstyle_graffiti.jpg differ diff --git a/sdxl_styles/samples/artstyle_hyperrealism.jpg b/sdxl_styles/samples/artstyle_hyperrealism.jpg new file mode 100644 index 0000000000000000000000000000000000000000..8ab9e619351a6f3728b53731bb8c9534ad1f7343 Binary files /dev/null and b/sdxl_styles/samples/artstyle_hyperrealism.jpg differ diff --git a/sdxl_styles/samples/artstyle_impressionist.jpg b/sdxl_styles/samples/artstyle_impressionist.jpg new file mode 100644 index 0000000000000000000000000000000000000000..a932fb9932fde9c10a24bd613cb9575a5dea3735 Binary files /dev/null and b/sdxl_styles/samples/artstyle_impressionist.jpg differ diff --git a/sdxl_styles/samples/artstyle_pointillism.jpg b/sdxl_styles/samples/artstyle_pointillism.jpg new file mode 100644 index 0000000000000000000000000000000000000000..902ee1c77bcd58b4912a7b09f2ac576396bdc5d2 Binary files /dev/null and b/sdxl_styles/samples/artstyle_pointillism.jpg differ diff --git a/sdxl_styles/samples/artstyle_pop_art.jpg b/sdxl_styles/samples/artstyle_pop_art.jpg new file mode 100644 index 0000000000000000000000000000000000000000..1c9864b024745fe5fb6df7418b669ebbe954563c Binary files /dev/null and b/sdxl_styles/samples/artstyle_pop_art.jpg differ diff --git a/sdxl_styles/samples/artstyle_psychedelic.jpg b/sdxl_styles/samples/artstyle_psychedelic.jpg new file mode 100644 index 0000000000000000000000000000000000000000..42b7c9906d35e650154f0b09b8e675d5592195bb Binary files /dev/null and b/sdxl_styles/samples/artstyle_psychedelic.jpg differ diff --git a/sdxl_styles/samples/artstyle_renaissance.jpg b/sdxl_styles/samples/artstyle_renaissance.jpg new file mode 100644 index 0000000000000000000000000000000000000000..322b758d69bc315190b1047b15e7a214f50dc30c Binary files /dev/null and b/sdxl_styles/samples/artstyle_renaissance.jpg differ diff --git a/sdxl_styles/samples/artstyle_steampunk.jpg b/sdxl_styles/samples/artstyle_steampunk.jpg new file mode 100644 index 0000000000000000000000000000000000000000..0ecf4ff9ed38aea7bece7bf5208542cbcd3fbb6d Binary files /dev/null and b/sdxl_styles/samples/artstyle_steampunk.jpg differ diff --git a/sdxl_styles/samples/artstyle_surrealist.jpg b/sdxl_styles/samples/artstyle_surrealist.jpg new file mode 100644 index 0000000000000000000000000000000000000000..44c48215d83b7a717d0e37770ced535ca8e1442b Binary files /dev/null and b/sdxl_styles/samples/artstyle_surrealist.jpg differ diff --git a/sdxl_styles/samples/artstyle_typography.jpg b/sdxl_styles/samples/artstyle_typography.jpg new file mode 100644 index 0000000000000000000000000000000000000000..5a36ae508069f7f6726b5fbe785b189a1d5c4a79 Binary files /dev/null and b/sdxl_styles/samples/artstyle_typography.jpg differ diff --git a/sdxl_styles/samples/artstyle_watercolor.jpg b/sdxl_styles/samples/artstyle_watercolor.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f7d9cc30c28cd8afed21f1d0b7a58efdfd01613c Binary files /dev/null and 
b/sdxl_styles/samples/artstyle_watercolor.jpg differ diff --git a/sdxl_styles/samples/astral_aura.jpg b/sdxl_styles/samples/astral_aura.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e13f8493122b99cd2e2062f5cd05b4139ddbeb5d Binary files /dev/null and b/sdxl_styles/samples/astral_aura.jpg differ diff --git a/sdxl_styles/samples/avant_garde.jpg b/sdxl_styles/samples/avant_garde.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f1e29b897460a9f28496a9771d07ee95f66f2cab Binary files /dev/null and b/sdxl_styles/samples/avant_garde.jpg differ diff --git a/sdxl_styles/samples/baroque.jpg b/sdxl_styles/samples/baroque.jpg new file mode 100644 index 0000000000000000000000000000000000000000..718aef7a8b5ef3b791a7875d0b474ea99ef21bca Binary files /dev/null and b/sdxl_styles/samples/baroque.jpg differ diff --git a/sdxl_styles/samples/bauhaus_style_poster.jpg b/sdxl_styles/samples/bauhaus_style_poster.jpg new file mode 100644 index 0000000000000000000000000000000000000000..087fe3b59c619df3462814ba0b8aecb6d310efa0 Binary files /dev/null and b/sdxl_styles/samples/bauhaus_style_poster.jpg differ diff --git a/sdxl_styles/samples/blueprint_schematic_drawing.jpg b/sdxl_styles/samples/blueprint_schematic_drawing.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e30120103eddf0ef48b7221e8b54fb0f72a50b2e Binary files /dev/null and b/sdxl_styles/samples/blueprint_schematic_drawing.jpg differ diff --git a/sdxl_styles/samples/caricature.jpg b/sdxl_styles/samples/caricature.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2ff3ee35c4a54ff88c1a6db87871e917d045c3ca Binary files /dev/null and b/sdxl_styles/samples/caricature.jpg differ diff --git a/sdxl_styles/samples/cel_shaded_art.jpg b/sdxl_styles/samples/cel_shaded_art.jpg new file mode 100644 index 0000000000000000000000000000000000000000..8a69ac22342ad769cead1d969cdc3fb6e6e952f5 Binary files /dev/null and b/sdxl_styles/samples/cel_shaded_art.jpg differ diff --git a/sdxl_styles/samples/character_design_sheet.jpg b/sdxl_styles/samples/character_design_sheet.jpg new file mode 100644 index 0000000000000000000000000000000000000000..6f8fb6650656ff0301f3a910d9de32a4360fb558 Binary files /dev/null and b/sdxl_styles/samples/character_design_sheet.jpg differ diff --git a/sdxl_styles/samples/cinematic_diva.jpg b/sdxl_styles/samples/cinematic_diva.jpg new file mode 100644 index 0000000000000000000000000000000000000000..74483019379254476f74da2d30d75681e479e792 Binary files /dev/null and b/sdxl_styles/samples/cinematic_diva.jpg differ diff --git a/sdxl_styles/samples/classicism_art.jpg b/sdxl_styles/samples/classicism_art.jpg new file mode 100644 index 0000000000000000000000000000000000000000..bf8e703382665a8adc57b8bff894bdb147014cd7 Binary files /dev/null and b/sdxl_styles/samples/classicism_art.jpg differ diff --git a/sdxl_styles/samples/color_field_painting.jpg b/sdxl_styles/samples/color_field_painting.jpg new file mode 100644 index 0000000000000000000000000000000000000000..92b4e0981fe402b823abe5a7dbd15dac0603c4b7 Binary files /dev/null and b/sdxl_styles/samples/color_field_painting.jpg differ diff --git a/sdxl_styles/samples/colored_pencil_art.jpg b/sdxl_styles/samples/colored_pencil_art.jpg new file mode 100644 index 0000000000000000000000000000000000000000..1a7c590e2026c3479043f395cba89502dafbd086 Binary files /dev/null and b/sdxl_styles/samples/colored_pencil_art.jpg differ diff --git a/sdxl_styles/samples/conceptual_art.jpg b/sdxl_styles/samples/conceptual_art.jpg new file 
mode 100644 index 0000000000000000000000000000000000000000..06882a20e9338f0a6e13e019cd284ac53891cf79 Binary files /dev/null and b/sdxl_styles/samples/conceptual_art.jpg differ diff --git a/sdxl_styles/samples/constructivism.jpg b/sdxl_styles/samples/constructivism.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d49c6828eb75a01a3aa45d22d28d274344a2ecd4 Binary files /dev/null and b/sdxl_styles/samples/constructivism.jpg differ diff --git a/sdxl_styles/samples/cubism.jpg b/sdxl_styles/samples/cubism.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2ca0f2861396911b16f6602e9643f4c23bfbbc39 Binary files /dev/null and b/sdxl_styles/samples/cubism.jpg differ diff --git a/sdxl_styles/samples/dadaism.jpg b/sdxl_styles/samples/dadaism.jpg new file mode 100644 index 0000000000000000000000000000000000000000..5573cb0778c354b096048ae166beb210cc855b08 Binary files /dev/null and b/sdxl_styles/samples/dadaism.jpg differ diff --git a/sdxl_styles/samples/dark_fantasy.jpg b/sdxl_styles/samples/dark_fantasy.jpg new file mode 100644 index 0000000000000000000000000000000000000000..7d60f6dd4db92fe63f5f0f51368f26823a6ed3f0 Binary files /dev/null and b/sdxl_styles/samples/dark_fantasy.jpg differ diff --git a/sdxl_styles/samples/dark_moody_atmosphere.jpg b/sdxl_styles/samples/dark_moody_atmosphere.jpg new file mode 100644 index 0000000000000000000000000000000000000000..38921c628616c0f499800fa5591f4190af933b6d Binary files /dev/null and b/sdxl_styles/samples/dark_moody_atmosphere.jpg differ diff --git a/sdxl_styles/samples/dmt_art_style.jpg b/sdxl_styles/samples/dmt_art_style.jpg new file mode 100644 index 0000000000000000000000000000000000000000..a7ffae0b5aad8bf1ad1599166ea724af07bcf3eb Binary files /dev/null and b/sdxl_styles/samples/dmt_art_style.jpg differ diff --git a/sdxl_styles/samples/doodle_art.jpg b/sdxl_styles/samples/doodle_art.jpg new file mode 100644 index 0000000000000000000000000000000000000000..8944eb0b13828543ceeb3a990d1e167c663a9357 Binary files /dev/null and b/sdxl_styles/samples/doodle_art.jpg differ diff --git a/sdxl_styles/samples/double_exposure.jpg b/sdxl_styles/samples/double_exposure.jpg new file mode 100644 index 0000000000000000000000000000000000000000..15b6fbb4e90d2727333ab80436a83e7bee72cfa0 Binary files /dev/null and b/sdxl_styles/samples/double_exposure.jpg differ diff --git a/sdxl_styles/samples/dripping_paint_splatter_art.jpg b/sdxl_styles/samples/dripping_paint_splatter_art.jpg new file mode 100644 index 0000000000000000000000000000000000000000..697c4438e1d06c55138453d76fe473cb265703f1 Binary files /dev/null and b/sdxl_styles/samples/dripping_paint_splatter_art.jpg differ diff --git a/sdxl_styles/samples/expressionism.jpg b/sdxl_styles/samples/expressionism.jpg new file mode 100644 index 0000000000000000000000000000000000000000..df5e777058aaa746cf96b2bdb77bb8c8fa23c3f8 Binary files /dev/null and b/sdxl_styles/samples/expressionism.jpg differ diff --git a/sdxl_styles/samples/faded_polaroid_photo.jpg b/sdxl_styles/samples/faded_polaroid_photo.jpg new file mode 100644 index 0000000000000000000000000000000000000000..51b2a135cab3601505681c4063f4717a47a2ed4d Binary files /dev/null and b/sdxl_styles/samples/faded_polaroid_photo.jpg differ diff --git a/sdxl_styles/samples/fauvism.jpg b/sdxl_styles/samples/fauvism.jpg new file mode 100644 index 0000000000000000000000000000000000000000..5afaaf5ee7e3d16ef7d8c961bff3d024e32608f2 Binary files /dev/null and b/sdxl_styles/samples/fauvism.jpg differ diff --git a/sdxl_styles/samples/flat_2d_art.jpg 
b/sdxl_styles/samples/flat_2d_art.jpg new file mode 100644 index 0000000000000000000000000000000000000000..9fba930e1cd8e06442577a60d85a34fcb7f0201c Binary files /dev/null and b/sdxl_styles/samples/flat_2d_art.jpg differ diff --git a/sdxl_styles/samples/fooocus_cinematic.jpg b/sdxl_styles/samples/fooocus_cinematic.jpg new file mode 100644 index 0000000000000000000000000000000000000000..1521f7400d9afb647b7a655f98ed34f20f752443 Binary files /dev/null and b/sdxl_styles/samples/fooocus_cinematic.jpg differ diff --git a/sdxl_styles/samples/fooocus_enhance.jpg b/sdxl_styles/samples/fooocus_enhance.jpg new file mode 100644 index 0000000000000000000000000000000000000000..20e5ba2f5a945df962b6573968a37c4e6b06d560 Binary files /dev/null and b/sdxl_styles/samples/fooocus_enhance.jpg differ diff --git a/sdxl_styles/samples/fooocus_masterpiece.jpg b/sdxl_styles/samples/fooocus_masterpiece.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e57b1fd0a89ddeaabad8dbf44099396752144fc8 Binary files /dev/null and b/sdxl_styles/samples/fooocus_masterpiece.jpg differ diff --git a/sdxl_styles/samples/fooocus_negative.jpg b/sdxl_styles/samples/fooocus_negative.jpg new file mode 100644 index 0000000000000000000000000000000000000000..b025c43fbfdf3add44388344eeb028bae9545290 Binary files /dev/null and b/sdxl_styles/samples/fooocus_negative.jpg differ diff --git a/sdxl_styles/samples/fooocus_photograph.jpg b/sdxl_styles/samples/fooocus_photograph.jpg new file mode 100644 index 0000000000000000000000000000000000000000..3f28b857cc017fba844519e7fc03534495e47b3c Binary files /dev/null and b/sdxl_styles/samples/fooocus_photograph.jpg differ diff --git a/sdxl_styles/samples/fooocus_sharp.jpg b/sdxl_styles/samples/fooocus_sharp.jpg new file mode 100644 index 0000000000000000000000000000000000000000..12f7145ce851dfdb3436db016e71753939babf77 Binary files /dev/null and b/sdxl_styles/samples/fooocus_sharp.jpg differ diff --git a/sdxl_styles/samples/fooocus_v2.jpg b/sdxl_styles/samples/fooocus_v2.jpg new file mode 100644 index 0000000000000000000000000000000000000000..6e94d5b07c45e9e0ca30157d7b4176a206980ecc Binary files /dev/null and b/sdxl_styles/samples/fooocus_v2.jpg differ diff --git a/sdxl_styles/samples/fortnite_art_style.jpg b/sdxl_styles/samples/fortnite_art_style.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e90a4f643e7f73253cd0bc70a98090a3f25d4029 Binary files /dev/null and b/sdxl_styles/samples/fortnite_art_style.jpg differ diff --git a/sdxl_styles/samples/futurism.jpg b/sdxl_styles/samples/futurism.jpg new file mode 100644 index 0000000000000000000000000000000000000000..85267a6264350f756797fe771cd79e70452a9bae Binary files /dev/null and b/sdxl_styles/samples/futurism.jpg differ diff --git a/sdxl_styles/samples/futuristic_biomechanical.jpg b/sdxl_styles/samples/futuristic_biomechanical.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f8c5c082260146798873de714c250f93ebbaa3ae Binary files /dev/null and b/sdxl_styles/samples/futuristic_biomechanical.jpg differ diff --git a/sdxl_styles/samples/futuristic_biomechanical_cyberpunk.jpg b/sdxl_styles/samples/futuristic_biomechanical_cyberpunk.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e29a9b5bd367657c6a3dd11106d3403ad85ad826 Binary files /dev/null and b/sdxl_styles/samples/futuristic_biomechanical_cyberpunk.jpg differ diff --git a/sdxl_styles/samples/futuristic_cybernetic.jpg b/sdxl_styles/samples/futuristic_cybernetic.jpg new file mode 100644 index 
0000000000000000000000000000000000000000..f8042285abe43d485cffe233ffe7eb02de843431 Binary files /dev/null and b/sdxl_styles/samples/futuristic_cybernetic.jpg differ diff --git a/sdxl_styles/samples/futuristic_cybernetic_robot.jpg b/sdxl_styles/samples/futuristic_cybernetic_robot.jpg new file mode 100644 index 0000000000000000000000000000000000000000..6f988b788867cce79112e3af95e0b6f9a44f622a Binary files /dev/null and b/sdxl_styles/samples/futuristic_cybernetic_robot.jpg differ diff --git a/sdxl_styles/samples/futuristic_cyberpunk_cityscape.jpg b/sdxl_styles/samples/futuristic_cyberpunk_cityscape.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c05280b703e3ca4e6d50b080521d33bbd4250a21 Binary files /dev/null and b/sdxl_styles/samples/futuristic_cyberpunk_cityscape.jpg differ diff --git a/sdxl_styles/samples/futuristic_futuristic.jpg b/sdxl_styles/samples/futuristic_futuristic.jpg new file mode 100644 index 0000000000000000000000000000000000000000..da8d4ccfa3cece9863f8b86bc7d355e8431a6317 Binary files /dev/null and b/sdxl_styles/samples/futuristic_futuristic.jpg differ diff --git a/sdxl_styles/samples/futuristic_retro_cyberpunk.jpg b/sdxl_styles/samples/futuristic_retro_cyberpunk.jpg new file mode 100644 index 0000000000000000000000000000000000000000..7686243e3eeacec321e86089401c92269cc5613e Binary files /dev/null and b/sdxl_styles/samples/futuristic_retro_cyberpunk.jpg differ diff --git a/sdxl_styles/samples/futuristic_retro_futurism.jpg b/sdxl_styles/samples/futuristic_retro_futurism.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f0fa6e949bd838d17b7b2b33d5aba10741f793b2 Binary files /dev/null and b/sdxl_styles/samples/futuristic_retro_futurism.jpg differ diff --git a/sdxl_styles/samples/futuristic_sci_fi.jpg b/sdxl_styles/samples/futuristic_sci_fi.jpg new file mode 100644 index 0000000000000000000000000000000000000000..571c61410013ad9998bae4bfa33c11bd7eee7519 Binary files /dev/null and b/sdxl_styles/samples/futuristic_sci_fi.jpg differ diff --git a/sdxl_styles/samples/futuristic_vaporwave.jpg b/sdxl_styles/samples/futuristic_vaporwave.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f8a77fe6aeb87c8708d21579848bef75fa4d5e6c Binary files /dev/null and b/sdxl_styles/samples/futuristic_vaporwave.jpg differ diff --git a/sdxl_styles/samples/game_bubble_bobble.jpg b/sdxl_styles/samples/game_bubble_bobble.jpg new file mode 100644 index 0000000000000000000000000000000000000000..1111de9ef203088129ded781efe3ef12ced0262d Binary files /dev/null and b/sdxl_styles/samples/game_bubble_bobble.jpg differ diff --git a/sdxl_styles/samples/game_cyberpunk_game.jpg b/sdxl_styles/samples/game_cyberpunk_game.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e87451de0518201b235a3588ab232e5b214e0b13 Binary files /dev/null and b/sdxl_styles/samples/game_cyberpunk_game.jpg differ diff --git a/sdxl_styles/samples/game_fighting_game.jpg b/sdxl_styles/samples/game_fighting_game.jpg new file mode 100644 index 0000000000000000000000000000000000000000..b12c07d3f738224840730d899d9aebc5a6941a37 Binary files /dev/null and b/sdxl_styles/samples/game_fighting_game.jpg differ diff --git a/sdxl_styles/samples/game_gta.jpg b/sdxl_styles/samples/game_gta.jpg new file mode 100644 index 0000000000000000000000000000000000000000..6458c6d856b51485ab736c21e302046219c9d3db Binary files /dev/null and b/sdxl_styles/samples/game_gta.jpg differ diff --git a/sdxl_styles/samples/game_mario.jpg b/sdxl_styles/samples/game_mario.jpg new file mode 100644 
index 0000000000000000000000000000000000000000..17cff4c4025851e84c00495158bdcb62ac170cbc Binary files /dev/null and b/sdxl_styles/samples/game_mario.jpg differ diff --git a/sdxl_styles/samples/game_minecraft.jpg b/sdxl_styles/samples/game_minecraft.jpg new file mode 100644 index 0000000000000000000000000000000000000000..4e20641fbfc8776e98629acc6b3cd1ef513a780e Binary files /dev/null and b/sdxl_styles/samples/game_minecraft.jpg differ diff --git a/sdxl_styles/samples/game_pokemon.jpg b/sdxl_styles/samples/game_pokemon.jpg new file mode 100644 index 0000000000000000000000000000000000000000..20071f80ae889eb581202eee1fe54093a8fe4e98 Binary files /dev/null and b/sdxl_styles/samples/game_pokemon.jpg differ diff --git a/sdxl_styles/samples/game_retro_arcade.jpg b/sdxl_styles/samples/game_retro_arcade.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c3836dc8d40f6eb91a45169913232e3927d4f6cc Binary files /dev/null and b/sdxl_styles/samples/game_retro_arcade.jpg differ diff --git a/sdxl_styles/samples/game_retro_game.jpg b/sdxl_styles/samples/game_retro_game.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ff81488a93721c18af399bebe919b2dcfd924a48 Binary files /dev/null and b/sdxl_styles/samples/game_retro_game.jpg differ diff --git a/sdxl_styles/samples/game_rpg_fantasy_game.jpg b/sdxl_styles/samples/game_rpg_fantasy_game.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c32a2cc750045bf3015cf70ac9aa3d461641e0f8 Binary files /dev/null and b/sdxl_styles/samples/game_rpg_fantasy_game.jpg differ diff --git a/sdxl_styles/samples/game_strategy_game.jpg b/sdxl_styles/samples/game_strategy_game.jpg new file mode 100644 index 0000000000000000000000000000000000000000..a55eff5cd20a5ab203d7b52233060134230140a1 Binary files /dev/null and b/sdxl_styles/samples/game_strategy_game.jpg differ diff --git a/sdxl_styles/samples/game_streetfighter.jpg b/sdxl_styles/samples/game_streetfighter.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f389e0d316eb0f3631bc07b368d0c0650e8ef898 Binary files /dev/null and b/sdxl_styles/samples/game_streetfighter.jpg differ diff --git a/sdxl_styles/samples/game_zelda.jpg b/sdxl_styles/samples/game_zelda.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f9b875d773daa8721f85dc524c8458c13f750d7d Binary files /dev/null and b/sdxl_styles/samples/game_zelda.jpg differ diff --git a/sdxl_styles/samples/glitchcore.jpg b/sdxl_styles/samples/glitchcore.jpg new file mode 100644 index 0000000000000000000000000000000000000000..3122cda8721c6572cc6c10d133c7666ac280c4d1 Binary files /dev/null and b/sdxl_styles/samples/glitchcore.jpg differ diff --git a/sdxl_styles/samples/glo_fi.jpg b/sdxl_styles/samples/glo_fi.jpg new file mode 100644 index 0000000000000000000000000000000000000000..816b2244f9ded4013a617f99afbd3d7883c7ab6e Binary files /dev/null and b/sdxl_styles/samples/glo_fi.jpg differ diff --git a/sdxl_styles/samples/googie_art_style.jpg b/sdxl_styles/samples/googie_art_style.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e9a08c20989ea46e79ca4e6b9e33d1deb48fdd2f Binary files /dev/null and b/sdxl_styles/samples/googie_art_style.jpg differ diff --git a/sdxl_styles/samples/graffiti_art.jpg b/sdxl_styles/samples/graffiti_art.jpg new file mode 100644 index 0000000000000000000000000000000000000000..87aebdda2b72bc19a095a54047b983bc3ca587f1 Binary files /dev/null and b/sdxl_styles/samples/graffiti_art.jpg differ diff --git 
a/sdxl_styles/samples/harlem_renaissance_art.jpg b/sdxl_styles/samples/harlem_renaissance_art.jpg new file mode 100644 index 0000000000000000000000000000000000000000..bd3354942630a96ba59b581c7fc690bc83b63f5b Binary files /dev/null and b/sdxl_styles/samples/harlem_renaissance_art.jpg differ diff --git a/sdxl_styles/samples/high_fashion.jpg b/sdxl_styles/samples/high_fashion.jpg new file mode 100644 index 0000000000000000000000000000000000000000..4dfc404d20e832c29c2ee875798b68e02f4245ac Binary files /dev/null and b/sdxl_styles/samples/high_fashion.jpg differ diff --git a/sdxl_styles/samples/idyllic.jpg b/sdxl_styles/samples/idyllic.jpg new file mode 100644 index 0000000000000000000000000000000000000000..660e9cacb06caac3c21c3af8568cdbf99be38eff Binary files /dev/null and b/sdxl_styles/samples/idyllic.jpg differ diff --git a/sdxl_styles/samples/impressionism.jpg b/sdxl_styles/samples/impressionism.jpg new file mode 100644 index 0000000000000000000000000000000000000000..525222333a9386431ab47863f978621848e7273c Binary files /dev/null and b/sdxl_styles/samples/impressionism.jpg differ diff --git a/sdxl_styles/sdxl_styles_diva.json b/sdxl_styles/sdxl_styles_diva.json new file mode 100644 index 0000000000000000000000000000000000000000..ea552f4b92737f8d6369fb443210cf7446eed113 --- /dev/null +++ b/sdxl_styles/sdxl_styles_diva.json @@ -0,0 +1,397 @@ +[ + { + "name": "cinematic-diva", + "prompt": "UHD, 8K, ultra detailed, a cinematic photograph of {prompt}, beautiful lighting, great composition", + "negative_prompt": "ugly, deformed, noisy, blurry, NSFW" + }, + { + "name": "Abstract Expressionism", + "prompt": "Abstract Expressionism Art, {prompt}, High contrast, minimalistic, colorful, stark, dramatic, expressionism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic" + }, + { + "name": "Academia", + "prompt": "Academia, {prompt}, preppy Ivy League style, stark, dramatic, chic boarding school, academia", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, grunge, sloppy, unkempt" + }, + { + "name": "Action Figure", + "prompt": "Action Figure, {prompt}, plastic collectable action figure, collectable toy action figure", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Adorable 3D Character", + "prompt": "Adorable 3D Character, {prompt}, 3D render, adorable character, 3D art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, grunge, sloppy, unkempt, photograph, photo, realistic" + }, + { + "name": "Adorable Kawaii", + "prompt": "Adorable Kawaii, {prompt}, pretty, cute, adorable, kawaii", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, gothic, dark, moody, monochromatic" + }, + { + "name": "Art Deco", + "prompt": "Art Deco, {prompt}, sleek, geometric forms, art deco style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Art Nouveau", + "prompt": "Art Nouveau, beautiful art, {prompt}, sleek, organic forms, long, sinuous, art nouveau style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, industrial, mechanical" + }, + { + "name": "Astral Aura", + "prompt": "Astral Aura, {prompt}, astral, colorful aura, vibrant energy", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Avant-garde", + "prompt": "Avant-garde, {prompt}, unusual, experimental, avant-garde art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Baroque", + "prompt": "Baroque, {prompt}, 
dramatic, exuberant, grandeur, baroque art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Bauhaus-Style Poster", + "prompt": "Bauhaus-Style Poster, {prompt}, simple geometric shapes, clean lines, primary colors, Bauhaus-Style Poster", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Blueprint Schematic Drawing", + "prompt": "Blueprint Schematic Drawing, {prompt}, technical drawing, blueprint, schematic", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Caricature", + "prompt": "Caricature, {prompt}, exaggerated, comical, caricature", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realistic" + }, + { + "name": "Cel Shaded Art", + "prompt": "Cel Shaded Art, {prompt}, 2D, flat color, toon shading, cel shaded style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Character Design Sheet", + "prompt": "Character Design Sheet, {prompt}, character reference sheet, character turn around", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Classicism Art", + "prompt": "Classicism Art, {prompt}, inspired by Roman and Greek culture, clarity, harmonious, classicism art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Color Field Painting", + "prompt": "Color Field Painting, {prompt}, abstract, simple, geometic, color field painting style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Colored Pencil Art", + "prompt": "Colored Pencil Art, {prompt}, colored pencil strokes, light color, visible paper texture, colored pencil art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Conceptual Art", + "prompt": "Conceptual Art, {prompt}, concept art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Constructivism", + "prompt": "Constructivism Art, {prompt}, minimalistic, geometric forms, constructivism art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Cubism", + "prompt": "Cubism Art, {prompt}, flat geometric forms, cubism art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Dadaism", + "prompt": "Dadaism Art, {prompt}, satirical, nonsensical, dadaism art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Dark Fantasy", + "prompt": "Dark Fantasy Art, {prompt}, dark, moody, dark fantasy style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, bright, sunny" + }, + { + "name": "Dark Moody Atmosphere", + "prompt": "Dark Moody Atmosphere, {prompt}, dramatic, mysterious, dark moody atmosphere", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, vibrant, colorful, bright" + }, + { + "name": "DMT Art Style", + "prompt": "DMT Art Style, {prompt}, bright colors, surreal visuals, swirling patterns, DMT art style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Doodle Art", + "prompt": "Doodle Art Style, {prompt}, drawing, freeform, swirling patterns, doodle art style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Double Exposure", + "prompt": "Double Exposure Style, {prompt}, double image ghost effect, image combination, double exposure style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + 
"name": "Dripping Paint Splatter Art", + "prompt": "Dripping Paint Splatter Art, {prompt}, dramatic, paint drips, splatters, dripping paint", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Expressionism", + "prompt": "Expressionism Art Style, {prompt}, movement, contrast, emotional, exaggerated forms, expressionism art style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Faded Polaroid Photo", + "prompt": "Faded Polaroid Photo, {prompt}, analog, old faded photo, old polaroid", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, vibrant, colorful" + }, + { + "name": "Fauvism", + "prompt": "Fauvism Art, {prompt}, painterly, bold colors, textured brushwork, fauvism art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Flat 2D Art", + "prompt": "Flat 2D Art, {prompt}, simple flat color, 2-dimensional, Flat 2D Art Style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, 3D, photo, realistic" + }, + { + "name": "Fortnite Art Style", + "prompt": "Fortnite Art Style, {prompt}, 3D cartoon, colorful, Fortnite Art Style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, photo, realistic" + }, + { + "name": "Futurism", + "prompt": "Futurism Art Style, {prompt}, dynamic, dramatic, Futurism Art Style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Glitchcore", + "prompt": "Glitchcore Art Style, {prompt}, dynamic, dramatic, distorted, vibrant colors, glitchcore art style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Glo-fi", + "prompt": "Glo-fi Art Style, {prompt}, dynamic, dramatic, vibrant colors, glo-fi art style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Googie Art Style", + "prompt": "Googie Art Style, {prompt}, dynamic, dramatic, 1950's futurism, bold boomerang angles, Googie art style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Graffiti Art", + "prompt": "Graffiti Art Style, {prompt}, dynamic, dramatic, vibrant colors, graffiti art style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Harlem Renaissance Art", + "prompt": "Harlem Renaissance Art Style, {prompt}, dynamic, dramatic, 1920s African American culture, Harlem Renaissance art style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "High Fashion", + "prompt": "High Fashion, {prompt}, dynamic, dramatic, haute couture, elegant, ornate clothing, High Fashion", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Idyllic", + "prompt": "Idyllic, {prompt}, peaceful, happy, pleasant, happy, harmonious, picturesque, charming", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Impressionism", + "prompt": "Impressionism, {prompt}, painterly, small brushstrokes, visible brushstrokes, impressionistic style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Infographic Drawing", + "prompt": "Infographic Drawing, {prompt}, diagram, infographic", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Ink Dripping Drawing", + "prompt": "Ink Dripping Drawing, {prompt}, ink drawing, dripping ink", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, colorful, vibrant" + }, + { + "name": 
"Japanese Ink Drawing", + "prompt": "Japanese Ink Drawing, {prompt}, ink drawing, inkwash, Japanese Ink Drawing", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, colorful, vibrant" + }, + { + "name": "Knolling Photography", + "prompt": "Knolling Photography, {prompt}, flat lay photography, object arrangment, knolling photography", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Light Cheery Atmosphere", + "prompt": "Light Cheery Atmosphere, {prompt}, happy, joyful, cheerful, carefree, gleeful, lighthearted, pleasant atmosphere", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, monochromatic, dark, moody" + }, + { + "name": "Logo Design", + "prompt": "Logo Design, {prompt}, dynamic graphic art, vector art, minimalist, professional logo design", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Luxurious Elegance", + "prompt": "Luxurious Elegance, {prompt}, extravagant, ornate, designer, opulent, picturesque, lavish", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Macro Photography", + "prompt": "Macro Photography, {prompt}, close-up, macro 100mm, macro photography", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Mandola Art", + "prompt": "Mandola art style, {prompt}, complex, circular design, mandola", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Marker Drawing", + "prompt": "Marker Drawing, {prompt}, bold marker lines, visibile paper texture, marker drawing", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, photograph, realistic" + }, + { + "name": "Medievalism", + "prompt": "Medievalism, {prompt}, inspired by The Middle Ages, medieval art, elaborate patterns and decoration, Medievalism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Minimalism", + "prompt": "Minimalism, {prompt}, abstract, simple geometic shapes, hard edges, sleek contours, Minimalism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Neo-Baroque", + "prompt": "Neo-Baroque, {prompt}, ornate and elaborate, dynaimc, Neo-Baroque", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Neo-Byzantine", + "prompt": "Neo-Byzantine, {prompt}, grand decorative religious style, Orthodox Christian inspired, Neo-Byzantine", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Neo-Futurism", + "prompt": "Neo-Futurism, {prompt}, high-tech, curves, spirals, flowing lines, idealistic future, Neo-Futurism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Neo-Impressionism", + "prompt": "Neo-Impressionism, {prompt}, tiny dabs of color, Pointillism, painterly, Neo-Impressionism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, photograph, realistic" + }, + { + "name": "Neo-Rococo", + "prompt": "Neo-Rococo, {prompt}, curved forms, naturalistic ornamentation, elaborate, decorative, gaudy, Neo-Rococo", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Neoclassicism", + "prompt": "Neoclassicism, {prompt}, ancient Rome and Greece inspired, idealic, sober colors, Neoclassicism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Op Art", + "prompt": "Op Art, {prompt}, optical illusion, abstract, geometric pattern, impression of 
movement, Op Art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Ornate and Intricate", + "prompt": "Ornate and Intricate, {prompt}, decorative, highly detailed, elaborate, ornate, intricate", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Pencil Sketch Drawing", + "prompt": "Pencil Sketch Drawing, {prompt}, black and white drawing, graphite drawing", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Pop Art 2", + "prompt": "Pop Art, {prompt}, vivid colors, flat color, 2D, strong lines, Pop Art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, photo, realistic" + }, + { + "name": "Rococo", + "prompt": "Rococo, {prompt}, flamboyant, pastel colors, curved lines, elaborate detail, Rococo", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Silhouette Art", + "prompt": "Silhouette Art, {prompt}, high contrast, well defined, Silhouette Art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Simple Vector Art", + "prompt": "Simple Vector Art, {prompt}, 2D flat, simple shapes, minimalistic, professional graphic, flat color, high contrast, Simple Vector Art", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, 3D, photo, realistic" + }, + { + "name": "Sketchup", + "prompt": "Sketchup, {prompt}, CAD, professional design, Sketchup", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, photo, photograph" + }, + { + "name": "Steampunk 2", + "prompt": "Steampunk, {prompt}, retrofuturistic science fantasy, steam-powered tech, vintage industry, gears, neo-victorian, steampunk", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Surrealism", + "prompt": "Surrealism, {prompt}, expressive, dramatic, organic lines and forms, dreamlike and mysterious, Surrealism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realistic" + }, + { + "name": "Suprematism", + "prompt": "Suprematism, {prompt}, abstract, limited color palette, geometric forms, Suprematism", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realistic" + }, + { + "name": "Terragen", + "prompt": "Terragen, {prompt}, beautiful massive landscape, epic scenery, Terragen", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Tranquil Relaxing Atmosphere", + "prompt": "Tranquil Relaxing Atmosphere, {prompt}, calming style, soothing colors, peaceful, idealic, Tranquil Relaxing Atmosphere", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, oversaturated" + }, + { + "name": "Sticker Designs", + "prompt": "Vector Art Stickers, {prompt}, professional vector design, sticker designs, Sticker Sheet", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Vibrant Rim Light", + "prompt": "Vibrant Rim Light, {prompt}, bright rim light, high contrast, bold edge light", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Volumetric Lighting", + "prompt": "Volumetric Lighting, {prompt}, light depth, dramatic atmospheric lighting, Volumetric Lighting", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast" + }, + { + "name": "Watercolor 2", + "prompt": "Watercolor style painting, {prompt}, visible paper texture, colorwash, watercolor", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, photo, realistic" + }, + { + 
"name": "Whimsical and Playful", + "prompt": "Whimsical and Playful, {prompt}, imaginative, fantastical, bight colors, stylized, happy, Whimsical and Playful", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, drab, boring, moody" + } +] diff --git a/sdxl_styles/sdxl_styles_fooocus.json b/sdxl_styles/sdxl_styles_fooocus.json new file mode 100644 index 0000000000000000000000000000000000000000..81d6442e6ea3d070a0c88d02596b6daa050cb12e --- /dev/null +++ b/sdxl_styles/sdxl_styles_fooocus.json @@ -0,0 +1,30 @@ +[ + { + "name": "Fooocus Enhance", + "negative_prompt": "(worst quality, low quality, normal quality, lowres, low details, oversaturated, undersaturated, overexposed, underexposed, grayscale, bw, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry, grainy), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (airbrushed, cartoon, anime, semi-realistic, cgi, render, blender, digital art, manga, amateur:1.3), (3D ,3D Game, 3D Game Scene, 3D Character:1.1), (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3)" + }, + { + "name": "Fooocus Sharp", + "prompt": "cinematic still {prompt} . emotional, harmonious, vignette, 4k epic detailed, shot on kodak, 35mm photo, sharp focus, high budget, cinemascope, moody, epic, gorgeous, film grain, grainy", + "negative_prompt": "anime, cartoon, graphic, (blur, blurry, bokeh), text, painting, crayon, graphite, abstract, glitch, deformed, mutated, ugly, disfigured" + }, + { + "name": "Fooocus Masterpiece", + "prompt": "(masterpiece), (best quality), (ultra-detailed), {prompt}, illustration, disheveled hair, detailed eyes, perfect composition, moist skin, intricate details, earrings, by wlop", + "negative_prompt": "longbody, lowres, bad anatomy, bad hands, missing fingers, pubic hair,extra digit, fewer digits, cropped, worst quality, low quality" + }, + { + "name": "Fooocus Photograph", + "prompt": "photograph {prompt}, 50mm . 
cinematic 4k epic detailed 4k epic detailed photograph shot on kodak detailed cinematic hbo dark moody, 35mm photo, grainy, vignette, vintage, Kodachrome, Lomography, stained, highly detailed, found footage", + "negative_prompt": "Brad Pitt, bokeh, depth of field, blurry, cropped, regular face, saturated, contrast, deformed iris, deformed pupils, semi-realistic, cgi, 3d, render, sketch, cartoon, drawing, anime, text, cropped, out of frame, worst quality, low quality, jpeg artifacts, ugly, duplicate, morbid, mutilated, extra fingers, mutated hands, poorly drawn hands, poorly drawn face, mutation, deformed, dehydrated, bad anatomy, bad proportions, extra limbs, cloned face, disfigured, gross proportions, malformed limbs, missing arms, missing legs, extra arms, extra legs, fused fingers, too many fingers, long neck" + }, + { + "name": "Fooocus Negative", + "negative_prompt": "deformed, bad anatomy, disfigured, poorly drawn face, mutated, extra limb, ugly, poorly drawn hands, missing limb, floating limbs, disconnected limbs, disconnected head, malformed hands, long neck, mutated hands and fingers, bad hands, missing fingers, cropped, worst quality, low quality, mutation, poorly drawn, huge calf, bad hands, fused hand, missing hand, disappearing arms, disappearing thigh, disappearing calf, disappearing legs, missing fingers, fused fingers, abnormal eye proportion, Abnormal hands, abnormal legs, abnormal feet, abnormal fingers, drawing, painting, crayon, sketch, graphite, impressionist, noisy, blurry, soft, deformed, ugly, anime, cartoon, graphic, text, painting, crayon, graphite, abstract, glitch" + }, + { + "name": "Fooocus Cinematic", + "prompt": "cinematic still {prompt} . emotional, harmonious, vignette, highly detailed, high budget, bokeh, cinemascope, moody, epic, gorgeous, film grain, grainy", + "negative_prompt": "anime, cartoon, graphic, text, painting, crayon, graphite, abstract, glitch, deformed, mutated, ugly, disfigured" + } +] diff --git a/sdxl_styles/sdxl_styles_marc_k3nt3l.json b/sdxl_styles/sdxl_styles_marc_k3nt3l.json new file mode 100644 index 0000000000000000000000000000000000000000..fbbe1a24930f5372e7f73c601d652a5584dcdbeb --- /dev/null +++ b/sdxl_styles/sdxl_styles_marc_k3nt3l.json @@ -0,0 +1,312 @@ +[ + { + "name": "MK Chromolithography", + "prompt": "Chromolithograph {prompt}. Vibrant colors, intricate details, rich color saturation, meticulous registration, multi-layered printing, decorative elements, historical charm, artistic reproductions, commercial posters, nostalgic, ornate compositions.", + "negative_prompt": "monochromatic, simple designs, limited color palette, imprecise registration, minimalistic, modern aesthetic, digital appearance." + }, + { + "name": "MK Cross Processing Print", + "prompt": "Cross processing print {prompt}. Experimental color shifts, unconventional tonalities, vibrant and surreal hues, heightened contrasts, unpredictable results, artistic unpredictability, retro and vintage feel, dynamic color interplay, abstract and dreamlike.", + "negative_prompt": "predictable color tones, traditional processing, realistic color representation, subdued contrasts, standard photographic aesthetics." + }, + { + "name": "MK Dufaycolor Photograph", + "prompt": "Dufaycolor photograph {prompt}. 
Vintage color palette, distinctive color rendering, soft and dreamy atmosphere, historical charm, unique color process, grainy texture, evocative mood, nostalgic aesthetic, hand-tinted appearance, artistic patina.", + "negative_prompt": "modern color reproduction, hyperrealistic tones, sharp and clear details, digital precision, contemporary aesthetic." + }, + { + "name": "MK Herbarium", + "prompt": "Herbarium drawing {prompt}. Botanical accuracy, old botanical book illustration, detailed illustrations, pressed plants, delicate and precise linework, scientific documentation, meticulous presentation, educational purpose, organic compositions, timeless aesthetic, naturalistic beauty.", + "negative_prompt": "abstract representation, vibrant colors, artistic interpretation, chaotic compositions, fantastical elements, digital appearance." + }, + { + "name": "MK Punk Collage", + "prompt": "punk collage style {prompt} . mixed media, papercut, textured paper, overlapping, ripped posters, safety pins, chaotic layers, graffiti-style elements, anarchy symbols, vintage photos, cut-and-paste aesthetic, bold typography, distorted images, political messages, urban decay, distressed textures, newspaper clippings, spray paint, rebellious icons, DIY spirit, vivid colors, punk band logos, edgy and raw compositions", + "negative_prompt": "conventional, blurry, noisy, low contrast" + }, + { + "name": "MK mosaic", + "prompt": "mosaic style {prompt} . fragmented, assembled, colorful, highly detailed", + "negative_prompt": "whole, unbroken, monochrome" + }, + { + "name": "MK Van Gogh", + "prompt": "Oil painting by Van Gogh {prompt} . Expressive, impasto, swirling brushwork, vibrant, brush strokes, Brushstroke-heavy, Textured, Impasto, Colorful, Dynamic, Bold, Distinctive, Vibrant, Whirling, Expressive, Dramatic, Swirling, Layered, Intense, Contrastive, Atmospheric, Luminous, Textural, Evocative, Spiraled, Van Gogh style", + "negative_prompt": "realistic, photorealistic, calm, straight lines, signature, frame, text, watermark" + }, + { + "name": "MK Coloring Book", + "prompt": "centered black and white high contrast line drawing, coloring book style, {prompt} . monochrome, blank white background", + "negative_prompt": "greyscale, gradients, shadows, colored, Red, Blue, Yellow, Green, Orange, Purple, Pink, Brown, Gray, Beige, Turquoise, Lavender, Cyan, Magenta, Olive, Indigo, black background" + }, + { + "name": "MK Singer Sargent", + "prompt": "Oil painting by John Singer Sargent {prompt}. Elegant, refined, masterful technique, realistic portrayal, subtle play of light, captivating expression, rich details, harmonious colors, skillful composition, brush strokes, chiaroscuro.", + "negative_prompt": "realistic, photorealistic, abstract, overly stylized, excessive contrasts, distorted, bright colors, disorder." + }, + { + "name": "MK Pollock", + "prompt": "Oil painting by Jackson Pollock {prompt}. Abstract expressionism, drip painting, chaotic composition, energetic, spontaneous, unconventional technique, dynamic, bold, distinctive, vibrant, intense, expressive, energetic, layered, non-representational, gestural.", + "negative_prompt": "(realistic:1.5), (photorealistic:1.5), representational, calm, ordered composition, precise lines, detailed forms, subdued colors, quiet, static, traditional, figurative." + }, + { + "name": "MK Basquiat", + "prompt": "Artwork by Jean-Michel Basquiat {prompt}. 
Neo-expressionism, street art influence, graffiti-inspired, raw, energetic, bold colors, dynamic composition, chaotic, layered, textural, expressive, spontaneous, distinctive, symbolic, energetic brushstrokes.", + "negative_prompt": "(realistic:1.5), (photorealistic:1.5), calm, precise lines, conventional composition, subdued" + }, + { + "name": "MK Andy Warhol", + "prompt": "Artwork in the style of Andy Warhol {prompt}. Pop art, vibrant colors, bold compositions, repetition of iconic imagery, celebrity culture, commercial aesthetics, mass production influence, stylized simplicity, cultural commentary, graphical elements, distinctive portraits.", + "negative_prompt": "subdued colors, realistic, lack of repetition, minimalistic." + }, + { + "name": "MK Halftone print", + "prompt": "Halftone print of {prompt}. Dot matrix pattern, grayscale tones, vintage aesthetic, newspaper print vibe, stylized dots, visual texture, black and white contrasts, retro appearance, artistic pointillism, pop culture, (Roy Lichtenstein style:1.5).", + "negative_prompt": "smooth gradients, continuous tones, vibrant colors." + }, + { + "name": "MK Gond Painting", + "prompt": "Gond painting {prompt}. Intricate patterns, vibrant colors, detailed motifs, nature-inspired themes, tribal folklore, fine lines, intricate detailing, storytelling compositions, mystical and folkloric, cultural richness.", + "negative_prompt": "monochromatic, abstract shapes, minimalistic." + }, + { + "name": "MK Albumen Print", + "prompt": "Albumen print {prompt}. Sepia tones, fine details, subtle tonal gradations, delicate highlights, vintage aesthetic, soft and muted atmosphere, historical charm, rich textures, meticulous craftsmanship, classic photographic technique, vignetting.", + "negative_prompt": "vibrant colors, high contrast, modern, digital appearance, sharp details, contemporary style." + }, + { + "name": "MK Aquatint Print", + "prompt": "Aquatint print {prompt}. Soft tonal gradations, atmospheric effects, velvety textures, rich contrasts, fine details, etching process, delicate lines, nuanced shading, expressive and moody atmosphere, artistic depth.", + "negative_prompt": "sharp contrasts, bold lines, minimalistic." + }, + { + "name": "MK Anthotype Print", + "prompt": "Anthotype print {prompt}. Monochrome dye, soft and muted colors, organic textures, ephemeral and delicate appearance, low details, watercolor canvas, low contrast, overexposed, silhouette, textured paper.", + "negative_prompt": "vibrant synthetic dyes, bold and saturated colors." + }, + { + "name": "MK Inuit Carving", + "prompt": "A sculpture made of ivory, {prompt}. Sculptures, Inuit art style, intricate carvings, natural materials, storytelling motifs, arctic wildlife themes, symbolic representations, cultural traditions, earthy tones, harmonious compositions, spiritual and mythological elements.", + "negative_prompt": "abstract, vibrant colors." + }, + { + "name": "MK Bromoil Print", + "prompt": "Bromoil print {prompt}. Painterly effects, sepia tones, textured surfaces, rich contrasts, expressive brushwork, tonal variations, vintage aesthetic, atmospheric mood, handmade quality, artistic experimentation, darkroom craftsmanship, vignetting.", + "negative_prompt": "smooth surfaces, minimal brushwork, contemporary digital appearance." + }, + { + "name": "MK Calotype Print", + "prompt": "Calotype print {prompt}. 
Soft focus, subtle tonal range, paper negative process, fine details, vintage aesthetic, artistic experimentation, atmospheric mood, early photographic charm, handmade quality, vignetting.", + "negative_prompt": "sharp focus, bold contrasts, modern aesthetic, digital photography." + }, + { + "name": "MK Color Sketchnote", + "prompt": "Color sketchnote {prompt}. Hand-drawn elements, vibrant colors, visual hierarchy, playful illustrations, varied typography, graphic icons, organic and dynamic layout, personalized touches, creative expression, engaging storytelling.", + "negative_prompt": "monochromatic, geometric layout." + }, + { + "name": "MK Cibulak Porcelain", + "prompt": "A sculpture made of blue pattern porcelain of {prompt}. Classic design, blue and white color scheme, intricate detailing, floral motifs, onion-shaped elements, historical charm, rococo, white ware, cobalt blue, underglaze pattern, fine craftsmanship, traditional elegance, delicate patterns, vintage aesthetic, Meissen, Blue Onion pattern, Cibulak.", + "negative_prompt": "tea, teapot, cup, teacup, bright colors, bold and modern design, absence of intricate detailing, lack of floral motifs, non-traditional shapes." + }, + { + "name": "MK Alcohol Ink Art", + "prompt": "Alcohol ink art {prompt}. Fluid and vibrant colors, unpredictable patterns, organic textures, translucent layers, abstract compositions, ethereal and dreamy effects, free-flowing movement, expressive brushstrokes, contemporary aesthetic, wet textured paper.", + "negative_prompt": "monochromatic, controlled patterns." + }, + { + "name": "MK One Line Art", + "prompt": "One line art {prompt}. Continuous and unbroken black line, minimalistic, simplicity, economical use of space, flowing and dynamic, symbolic representations, contemporary aesthetic, evocative and abstract, white background.", + "negative_prompt": "disjointed lines, complexity, complex detailing." + }, + { + "name": "MK Blacklight Paint", + "prompt": "Blacklight paint {prompt}. Fluorescent pigments, vibrant and surreal colors, ethereal glow, otherworldly effects, dynamic and psychedelic compositions, neon aesthetics, transformative in ultraviolet light, contemporary and experimental.", + "negative_prompt": "muted colors, traditional and realistic compositions." + }, + { + "name": "MK Carnival Glass", + "prompt": "A sculpture made of Carnival glass, {prompt}. Iridescent surfaces, vibrant colors, intricate patterns, opalescent hues, reflective and prismatic effects, Art Nouveau and Art Deco influences, vintage charm, intricate detailing, lustrous and luminous appearance, Carnival Glass style.", + "negative_prompt": "non-iridescent surfaces, muted colors, absence of intricate patterns, lack of opalescent hues, modern and minimalist aesthetic." + }, + { + "name": "MK Cyanotype Print", + "prompt": "Cyanotype print {prompt}. Prussian blue tones, distinctive coloration, high contrast, blueprint aesthetics, atmospheric mood, sun-exposed paper, silhouette effects, delicate details, historical charm, handmade and experimental quality.", + "negative_prompt": "vibrant colors, low contrast, modern and polished appearance." + }, + { + "name": "MK Cross-Stitching", + "prompt": "Cross-stitching {prompt}. Intricate patterns, embroidery thread, sewing, fine details, precise stitches, textile artistry, symmetrical designs, varied color palette, traditional and contemporary motifs, handmade and crafted, canvas, nostalgic charm.", + "negative_prompt": "paper, paint, ink, photography." 
+ }, + { + "name": "MK Encaustic Paint", + "prompt": "Encaustic paint {prompt}. Textured surfaces, translucent layers, luminous quality, wax medium, rich color saturation, fluid and organic shapes, contemporary and historical influences, mixed media elements, atmospheric depth.", + "negative_prompt": "flat surfaces, opaque layers, lack of wax medium, muted color palette, absence of textured surfaces, non-mixed media." + }, + { + "name": "MK Embroidery", + "prompt": "Embroidery {prompt}. Intricate stitching, embroidery thread, fine details, varied thread textures, textile artistry, embellished surfaces, diverse color palette, traditional and contemporary motifs, handmade and crafted, tactile and ornate.", + "negative_prompt": "minimalist, monochromatic." + }, + { + "name": "MK Gyotaku", + "prompt": "Gyotaku {prompt}. Fish impressions, realistic details, ink rubbings, textured surfaces, traditional Japanese art form, nature-inspired compositions, artistic representation of marine life, black and white contrasts, cultural significance.", + "negative_prompt": "photography." + }, + { + "name": "MK Luminogram", + "prompt": "Luminogram {prompt}. Photogram technique, ethereal and abstract effects, light and shadow interplay, luminous quality, experimental process, direct light exposure, unique and unpredictable results, artistic experimentation.", + "negative_prompt": "" + }, + { + "name": "MK Lite Brite Art", + "prompt": "Lite Brite art {prompt}. Luminous and colorful designs, pixelated compositions, retro aesthetic, glowing effects, creative patterns, interactive and playful, nostalgic charm, vibrant and dynamic arrangements.", + "negative_prompt": "monochromatic." + }, + { + "name": "MK Mokume-gane", + "prompt": "Mokume-gane {prompt}. Wood-grain patterns, mixed metal layers, intricate and organic designs, traditional Japanese metalwork, harmonious color combinations, artisanal craftsmanship, unique and layered textures, cultural and historical significance.", + "negative_prompt": "uniform metal surfaces." + }, + { + "name": "Pebble Art", + "prompt": "a sculpture made of pebbles, {prompt}. Pebble art style, natural materials, textured surfaces, balanced compositions, organic forms, harmonious arrangements, tactile and 3D effects, beach-inspired aesthetic, creative storytelling, artisanal craftsmanship.", + "negative_prompt": "non-natural materials, lack of textured surfaces, imbalanced compositions, absence of organic forms, non-tactile appearance." + }, + { + "name": "MK Palekh", + "prompt": "Palekh art {prompt}. Miniature paintings, intricate details, vivid colors, folkloric themes, lacquer finish, storytelling compositions, symbolic elements, Russian folklore influence, cultural and historical significance.", + "negative_prompt": "large-scale paintings." + }, + { + "name": "MK Suminagashi", + "prompt": "Suminagashi {prompt}. Floating ink patterns, marbled effects, delicate and ethereal designs, water-based ink, fluid and unpredictable compositions, meditative process, monochromatic or subtle color palette, Japanese artistic tradition.", + "negative_prompt": "vibrant and bold color palette." + }, + { + "name": "MK Scrimshaw", + "prompt": "A Scrimshaw engraving of {prompt}. Intricate engravings on a sperm whale's teeth, marine motifs, detailed scenes, nautical themes, black and white contrasts, historical craftsmanship, artisanal carving, storytelling compositions, maritime heritage.", + "negative_prompt": "colorful, modern." + }, + { + "name": "MK Shibori", + "prompt": "Shibori {prompt}. 
Textured fabric, intricate patterns, resist-dyeing technique, indigo or vibrant colors, organic and flowing designs, Japanese textile art, cultural tradition, tactile and visual interest.", + "negative_prompt": "monochromatic." + }, + { + "name": "MK Vitreous Enamel", + "prompt": "A sculpture made of Vitreous enamel {prompt}. Smooth and glossy surfaces, vibrant colors, glass-like finish, durable and resilient, intricate detailing, traditional and contemporary applications, artistic craftsmanship, jewelry and decorative objects, Vitreous enamel, colored glass.", + "negative_prompt": "rough surfaces, muted colors." + }, + { + "name": "MK Ukiyo-e", + "prompt": "Ukiyo-e {prompt}. Woodblock prints, vibrant colors, intricate details, depictions of landscapes, kabuki actors, beautiful women, cultural scenes, traditional Japanese art, artistic craftsmanship, historical significance.", + "negative_prompt": "absence of woodblock prints, muted colors, lack of intricate details, non-traditional Japanese themes, absence of cultural scenes." + }, + { + "name": "MK vintage-airline-poster", + "prompt": "vintage airline poster {prompt} . classic aviation fonts, pastel colors, elegant aircraft illustrations, scenic destinations, distressed textures, retro travel allure", + "negative_prompt": "modern fonts, bold colors, hyper-realistic, sleek design" + }, + { + "name": "MK vintage-travel-poster", + "prompt": "vintage travel poster {prompt} . retro fonts, muted colors, scenic illustrations, iconic landmarks, distressed textures, nostalgic vibes", + "negative_prompt": "modern fonts, vibrant colors, hyper-realistic, sleek design" + }, + { + "name": "MK bauhaus-style", + "prompt": "Bauhaus-inspired {prompt} . minimalism, geometric precision, primary colors, sans-serif typography, asymmetry, functional design", + "negative_prompt": "ornate, intricate, excessive detail, complex patterns, serif typography" + }, + { + "name": "MK afrofuturism", + "prompt": "Afrofuturism illustration {prompt} . vibrant colors, futuristic elements, cultural symbolism, cosmic imagery, dynamic patterns, empowering narratives", + "negative_prompt": "monochromatic" + }, + { + "name": "MK atompunk", + "prompt": "Atompunk illustration, {prompt} . retro-futuristic, atomic age aesthetics, sleek lines, metallic textures, futuristic technology, optimism, energy", + "negative_prompt": "organic, natural textures, rustic, dystopian" + }, + { + "name": "MK constructivism", + "prompt": "Constructivism {prompt} . geometric abstraction, bold colors, industrial aesthetics, dynamic compositions, utilitarian design, revolutionary spirit", + "negative_prompt": "organic shapes, muted colors, ornate elements, traditional" + }, + { + "name": "MK chicano-art", + "prompt": "Chicano art {prompt} . bold colors, cultural symbolism, muralism, lowrider aesthetics, barrio life, political messages, social activism, Mexico", + "negative_prompt": "monochromatic, minimalist, mainstream aesthetics" + }, + { + "name": "MK de-stijl", + "prompt": "De Stijl Art {prompt} . neoplasticism, primary colors, geometric abstraction, horizontal and vertical lines, simplicity, harmony, utopian ideals", + "negative_prompt": "complex patterns, muted colors, ornate elements, asymmetry" + }, + { + "name": "MK dayak-art", + "prompt": "Dayak art sculpture of {prompt} . 
intricate patterns, nature-inspired motifs, vibrant colors, traditional craftsmanship, cultural symbolism, storytelling", + "negative_prompt": "minimalist, monochromatic, modern" + }, + { + "name": "MK fayum-portrait", + "prompt": "Fayum portrait {prompt} . encaustic painting, realistic facial features, warm earth tones, serene expressions, ancient Egyptian influences", + "negative_prompt": "abstract, vibrant colors, exaggerated features, modern" + }, + { + "name": "MK illuminated-manuscript", + "prompt": "Illuminated manuscript {prompt} . intricate calligraphy, rich colors, detailed illustrations, gold leaf accents, ornate borders, religious, historical, medieval", + "negative_prompt": "modern typography, minimalist design, monochromatic, abstract themes" + }, + { + "name": "MK kalighat-painting", + "prompt": "Kalighat painting {prompt} . bold lines, vibrant colors, narrative storytelling, cultural motifs, flat compositions, expressive characters", + "negative_prompt": "subdued colors, intricate details, realistic portrayal, modern aesthetics" + }, + { + "name": "MK madhubani-painting", + "prompt": "Madhubani painting {prompt} . intricate patterns, vibrant colors, nature-inspired motifs, cultural storytelling, symmetry, folk art aesthetics", + "negative_prompt": "abstract, muted colors, minimalistic design, modern aesthetics" + }, + { + "name": "MK pictorialism", + "prompt": "Pictorialism illustration {prompt} . soft focus, atmospheric effects, artistic interpretation, tonality, muted colors, evocative storytelling", + "negative_prompt": "sharp focus, high contrast, realistic depiction, vivid colors" + }, + { + "name": "MK pichwai-painting", + "prompt": "Pichwai painting {prompt} . intricate detailing, vibrant colors, religious themes, nature motifs, devotional storytelling, gold leaf accents", + "negative_prompt": "minimalist, subdued colors, abstract design" + }, + { + "name": "MK patachitra-painting", + "prompt": "Patachitra painting {prompt} . bold outlines, vibrant colors, intricate detailing, mythological themes, storytelling, traditional craftsmanship", + "negative_prompt": "subdued colors, minimalistic, abstract, modern aesthetics" + }, + { + "name": "MK samoan-art-inspired", + "prompt": "Samoan art-inspired wooden sculpture {prompt} . traditional motifs, natural elements, bold colors, cultural symbolism, storytelling, craftsmanship", + "negative_prompt": "modern aesthetics, minimalist, abstract" + }, + { + "name": "MK tlingit-art", + "prompt": "Tlingit art {prompt} . formline design, natural elements, animal motifs, bold colors, cultural storytelling, traditional craftsmanship, Alaska traditional art, (totem:1.5)", + "negative_prompt": "" + }, + { + "name": "MK adnate-style", + "prompt": "Painting by Adnate {prompt} . realistic portraits, street art, large-scale murals, subdued color palette, social narratives", + "negative_prompt": "abstract, vibrant colors, small-scale art" + }, + { + "name": "MK ron-english-style", + "prompt": "Painting by Ron English {prompt} . pop-surrealism, cultural subversion, iconic mash-ups, vibrant and bold colors, satirical commentary", + "negative_prompt": "traditional, monochromatic" + }, + { + "name": "MK shepard-fairey-style", + "prompt": "Painting by Shepard Fairey {prompt} . 
street art, political activism, iconic stencils, bold typography, high contrast, red, black, and white color palette", + "negative_prompt": "traditional, muted colors" + } +] diff --git a/sdxl_styles/sdxl_styles_mre.json b/sdxl_styles/sdxl_styles_mre.json new file mode 100644 index 0000000000000000000000000000000000000000..c18c3100984ac7e2582d6d46db6f30aba23911b1 --- /dev/null +++ b/sdxl_styles/sdxl_styles_mre.json @@ -0,0 +1,112 @@ +[ + { + "name": "mre-cinematic-dynamic", + "prompt": "epic cinematic shot of dynamic {prompt} in motion. main subject of high budget action movie. raw photo, motion blur. best quality, high resolution", + "negative_prompt": "static, still, motionless, sluggish. drawing, painting, illustration, rendered. low budget. low quality, low resolution" + }, + { + "name": "mre-spontaneous-picture", + "prompt": "spontaneous picture of {prompt}, taken by talented amateur. best quality, high resolution. magical moment, natural look. simple but good looking", + "negative_prompt": "overthinked. low quality, low resolution" + }, + { + "name": "mre-artistic-vision", + "prompt": "powerful artistic vision of {prompt}. breathtaking masterpiece made by great artist. best quality, high resolution", + "negative_prompt": "insignificant, flawed, made by bad artist. low quality, low resolution" + }, + { + "name": "mre-dark-dream", + "prompt": "dark and unsettling dream showing {prompt}. best quality, high resolution. created by genius but depressed mad artist. grim beauty", + "negative_prompt": "naive, cheerful. comfortable, casual, boring, cliche. low quality, low resolution" + }, + { + "name": "mre-gloomy-art", + "prompt": "astonishing gloomy art made mainly of shadows and lighting, forming {prompt}. masterful usage of lighting, shadows and chiaroscuro. made by black-hearted artist, drawing from darkness. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-bad-dream", + "prompt": "picture from really bad dream about terrifying {prompt}, true horror. bone-chilling vision. mad world that shouldn't exist. best quality, high resolution", + "negative_prompt": "nice dream, pleasant experience. low quality, low resolution" + }, + { + "name": "mre-underground", + "prompt": "uncanny caliginous vision of {prompt}, created by remarkable underground artist. best quality, high resolution. raw and brutal art, careless but impressive style. inspired by darkness and chaos", + "negative_prompt": "photography, mainstream, civilized. low quality, low resolution" + }, + { + "name": "mre-surreal-painting", + "prompt": "surreal painting representing strange vision of {prompt}. harmonious madness, synergy with chance. unique artstyle, mindbending art, magical surrealism. best quality, high resolution", + "negative_prompt": "photography, illustration, drawing. realistic, possible. logical, sane. low quality, low resolution" + }, + { + "name": "mre-dynamic-illustration", + "prompt": "insanely dynamic illustration of {prompt}. best quality, high resolution. crazy artstyle, careless brushstrokes, emotional and fun", + "negative_prompt": "photography, realistic. static, still, slow, boring. low quality, low resolution" + }, + { + "name": "mre-undead-art", + "prompt": "long forgotten art created by undead artist illustrating {prompt}, tribute to the death and decay. miserable art of the damned. wretched and decaying world. best quality, high resolution", + "negative_prompt": "alive, playful, living. 
low quality, low resolution" + }, + { + "name": "mre-elemental-art", + "prompt": "art illustrating insane amounts of raging elemental energy turning into {prompt}, avatar of elements. magical surrealism, wizardry. best quality, high resolution", + "negative_prompt": "photography, realistic, real. low quality, low resolution" + }, + { + "name": "mre-space-art", + "prompt": "winner of inter-galactic art contest illustrating {prompt}, symbol of the interstellar singularity. best quality, high resolution. artstyle previously unseen in the whole galaxy", + "negative_prompt": "created by human race, low quality, low resolution" + }, + { + "name": "mre-ancient-illustration", + "prompt": "sublime ancient illustration of {prompt}, predating human civilization. crude and simple, but also surprisingly beautiful artwork, made by genius primeval artist. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-brave-art", + "prompt": "brave, shocking, and brutally true art showing {prompt}. inspired by courage and unlimited creativity. truth found in chaos. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-heroic-fantasy", + "prompt": "heroic fantasy painting of {prompt}, in the dangerous fantasy world. airbrush over oil on canvas. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-dark-cyberpunk", + "prompt": "dark cyberpunk illustration of brutal {prompt} in a world without hope, ruled by ruthless criminal corporations. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-lyrical-geometry", + "prompt": "geometric and lyrical abstraction painting presenting {prompt}. oil on metal. best quality, high resolution", + "negative_prompt": "photography, realistic, drawing, rendered. low quality, low resolution" + }, + { + "name": "mre-sumi-e-symbolic", + "prompt": "big long brushstrokes of deep black sumi-e turning into symbolic painting of {prompt}. master level raw art. best quality, high resolution", + "negative_prompt": "photography, rendered. low quality, low resolution" + }, + { + "name": "mre-sumi-e-detailed", + "prompt": "highly detailed black sumi-e painting of {prompt}. in-depth study of perfection, created by a master. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-manga", + "prompt": "manga artwork presenting {prompt}. created by japanese manga artist. highly emotional. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-anime", + "prompt": "anime artwork illustrating {prompt}. created by japanese anime studio. highly emotional. best quality, high resolution", + "negative_prompt": "low quality, low resolution" + }, + { + "name": "mre-comic", + "prompt": "breathtaking illustration from adult comic book presenting {prompt}. fabulous artwork. best quality, high resolution", + "negative_prompt": "deformed, ugly, low quality, low resolution" + } +] diff --git a/sdxl_styles/sdxl_styles_sai.json b/sdxl_styles/sdxl_styles_sai.json new file mode 100644 index 0000000000000000000000000000000000000000..a705b2431bf09735b276d26afbb8d55e9516ffcc --- /dev/null +++ b/sdxl_styles/sdxl_styles_sai.json @@ -0,0 +1,87 @@ +[ + { + "name": "sai-3d-model", + "prompt": "professional 3d model {prompt} . 
octane render, highly detailed, volumetric, dramatic lighting", + "negative_prompt": "ugly, deformed, noisy, low poly, blurry, painting" + }, + { + "name": "sai-analog film", + "prompt": "analog film photo {prompt} . faded film, desaturated, 35mm photo, grainy, vignette, vintage, Kodachrome, Lomography, stained, highly detailed, found footage", + "negative_prompt": "painting, drawing, illustration, glitch, deformed, mutated, cross-eyed, ugly, disfigured" + }, + { + "name": "sai-anime", + "prompt": "anime artwork {prompt} . anime style, key visual, vibrant, studio anime, highly detailed", + "negative_prompt": "photo, deformed, black and white, realism, disfigured, low contrast" + }, + { + "name": "sai-cinematic", + "prompt": "cinematic film still {prompt} . shallow depth of field, vignette, highly detailed, high budget, bokeh, cinemascope, moody, epic, gorgeous, film grain, grainy", + "negative_prompt": "anime, cartoon, graphic, text, painting, crayon, graphite, abstract, glitch, deformed, mutated, ugly, disfigured" + }, + { + "name": "sai-comic book", + "prompt": "comic {prompt} . graphic illustration, comic art, graphic novel art, vibrant, highly detailed", + "negative_prompt": "photograph, deformed, glitch, noisy, realistic, stock photo" + }, + { + "name": "sai-craft clay", + "prompt": "play-doh style {prompt} . sculpture, clay art, centered composition, Claymation", + "negative_prompt": "sloppy, messy, grainy, highly detailed, ultra textured, photo" + }, + { + "name": "sai-digital art", + "prompt": "concept art {prompt} . digital artwork, illustrative, painterly, matte painting, highly detailed", + "negative_prompt": "photo, photorealistic, realism, ugly" + }, + { + "name": "sai-enhance", + "prompt": "breathtaking {prompt} . award-winning, professional, highly detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, distorted, grainy" + }, + { + "name": "sai-fantasy art", + "prompt": "ethereal fantasy concept art of {prompt} . magnificent, celestial, ethereal, painterly, epic, majestic, magical, fantasy art, cover art, dreamy", + "negative_prompt": "photographic, realistic, realism, 35mm film, dslr, cropped, frame, text, deformed, glitch, noise, noisy, off-center, deformed, cross-eyed, closed eyes, bad anatomy, ugly, disfigured, sloppy, duplicate, mutated, black and white" + }, + { + "name": "sai-isometric", + "prompt": "isometric style {prompt} . vibrant, beautiful, crisp, detailed, ultra detailed, intricate", + "negative_prompt": "deformed, mutated, ugly, disfigured, blur, blurry, noise, noisy, realistic, photographic" + }, + { + "name": "sai-line art", + "prompt": "line art drawing {prompt} . professional, sleek, modern, minimalist, graphic, line art, vector graphics", + "negative_prompt": "anime, photorealistic, 35mm film, deformed, glitch, blurry, noisy, off-center, deformed, cross-eyed, closed eyes, bad anatomy, ugly, disfigured, mutated, realism, realistic, impressionism, expressionism, oil, acrylic" + }, + { + "name": "sai-lowpoly", + "prompt": "low-poly style {prompt} . low-poly game art, polygon mesh, jagged, blocky, wireframe edges, centered composition", + "negative_prompt": "noisy, sloppy, messy, grainy, highly detailed, ultra textured, photo" + }, + { + "name": "sai-neonpunk", + "prompt": "neonpunk style {prompt} . 
cyberpunk, vaporwave, neon, vibes, vibrant, stunningly beautiful, crisp, detailed, sleek, ultramodern, magenta highlights, dark purple shadows, high contrast, cinematic, ultra detailed, intricate, professional", + "negative_prompt": "painting, drawing, illustration, glitch, deformed, mutated, cross-eyed, ugly, disfigured" + }, + { + "name": "sai-origami", + "prompt": "origami style {prompt} . paper art, pleated paper, folded, origami art, pleats, cut and fold, centered composition", + "negative_prompt": "noisy, sloppy, messy, grainy, highly detailed, ultra textured, photo" + }, + { + "name": "sai-photographic", + "prompt": "cinematic photo {prompt} . 35mm photograph, film, bokeh, professional, 4k, highly detailed", + "negative_prompt": "drawing, painting, crayon, sketch, graphite, impressionist, noisy, blurry, soft, deformed, ugly" + }, + { + "name": "sai-pixel art", + "prompt": "pixel-art {prompt} . low-res, blocky, pixel art style, 8-bit graphics", + "negative_prompt": "sloppy, messy, blurry, noisy, highly detailed, ultra textured, photo, realistic" + }, + { + "name": "sai-texture", + "prompt": "texture {prompt} top down close-up", + "negative_prompt": "ugly, deformed, noisy, blurry" + } +] \ No newline at end of file diff --git a/sdxl_styles/sdxl_styles_twri.json b/sdxl_styles/sdxl_styles_twri.json new file mode 100644 index 0000000000000000000000000000000000000000..687a19d2bf97d7d9f40d46fd4d89fec4a9f057b6 --- /dev/null +++ b/sdxl_styles/sdxl_styles_twri.json @@ -0,0 +1,447 @@ +[ + { + "name": "ads-advertising", + "prompt": "advertising poster style {prompt} . Professional, modern, product-focused, commercial, eye-catching, highly detailed", + "negative_prompt": "noisy, blurry, amateurish, sloppy, unattractive" + }, + { + "name": "ads-automotive", + "prompt": "automotive advertisement style {prompt} . sleek, dynamic, professional, commercial, vehicle-focused, high-resolution, highly detailed", + "negative_prompt": "noisy, blurry, unattractive, sloppy, unprofessional" + }, + { + "name": "ads-corporate", + "prompt": "corporate branding style {prompt} . professional, clean, modern, sleek, minimalist, business-oriented, highly detailed", + "negative_prompt": "noisy, blurry, grungy, sloppy, cluttered, disorganized" + }, + { + "name": "ads-fashion editorial", + "prompt": "fashion editorial style {prompt} . high fashion, trendy, stylish, editorial, magazine style, professional, highly detailed", + "negative_prompt": "outdated, blurry, noisy, unattractive, sloppy" + }, + { + "name": "ads-food photography", + "prompt": "food photography style {prompt} . appetizing, professional, culinary, high-resolution, commercial, highly detailed", + "negative_prompt": "unappetizing, sloppy, unprofessional, noisy, blurry" + }, + { + "name": "ads-gourmet food photography", + "prompt": "gourmet food photo of {prompt} . soft natural lighting, macro details, vibrant colors, fresh ingredients, glistening textures, bokeh background, styled plating, wooden tabletop, garnished, tantalizing, editorial quality", + "negative_prompt": "cartoon, anime, sketch, grayscale, dull, overexposed, cluttered, messy plate, deformed" + }, + { + "name": "ads-luxury", + "prompt": "luxury product style {prompt} . elegant, sophisticated, high-end, luxurious, professional, highly detailed", + "negative_prompt": "cheap, noisy, blurry, unattractive, amateurish" + }, + { + "name": "ads-real estate", + "prompt": "real estate photography style {prompt} . 
professional, inviting, well-lit, high-resolution, property-focused, commercial, highly detailed", + "negative_prompt": "dark, blurry, unappealing, noisy, unprofessional" + }, + { + "name": "ads-retail", + "prompt": "retail packaging style {prompt} . vibrant, enticing, commercial, product-focused, eye-catching, professional, highly detailed", + "negative_prompt": "noisy, blurry, amateurish, sloppy, unattractive" + }, + { + "name": "artstyle-abstract", + "prompt": "abstract style {prompt} . non-representational, colors and shapes, expression of feelings, imaginative, highly detailed", + "negative_prompt": "realistic, photographic, figurative, concrete" + }, + { + "name": "artstyle-abstract expressionism", + "prompt": "abstract expressionist painting {prompt} . energetic brushwork, bold colors, abstract forms, expressive, emotional", + "negative_prompt": "realistic, photorealistic, low contrast, plain, simple, monochrome" + }, + { + "name": "artstyle-art deco", + "prompt": "art deco style {prompt} . geometric shapes, bold colors, luxurious, elegant, decorative, symmetrical, ornate, detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, modernist, minimalist" + }, + { + "name": "artstyle-art nouveau", + "prompt": "art nouveau style {prompt} . elegant, decorative, curvilinear forms, nature-inspired, ornate, detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, modernist, minimalist" + }, + { + "name": "artstyle-constructivist", + "prompt": "constructivist style {prompt} . geometric shapes, bold colors, dynamic composition, propaganda art style", + "negative_prompt": "realistic, photorealistic, low contrast, plain, simple, abstract expressionism" + }, + { + "name": "artstyle-cubist", + "prompt": "cubist artwork {prompt} . geometric shapes, abstract, innovative, revolutionary", + "negative_prompt": "anime, photorealistic, 35mm film, deformed, glitch, low contrast, noisy" + }, + { + "name": "artstyle-expressionist", + "prompt": "expressionist {prompt} . raw, emotional, dynamic, distortion for emotional effect, vibrant, use of unusual colors, detailed", + "negative_prompt": "realism, symmetry, quiet, calm, photo" + }, + { + "name": "artstyle-graffiti", + "prompt": "graffiti style {prompt} . street art, vibrant, urban, detailed, tag, mural", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic" + }, + { + "name": "artstyle-hyperrealism", + "prompt": "hyperrealistic art {prompt} . extremely high-resolution details, photographic, realism pushed to extreme, fine texture, incredibly lifelike", + "negative_prompt": "simplified, abstract, unrealistic, impressionistic, low resolution" + }, + { + "name": "artstyle-impressionist", + "prompt": "impressionist painting {prompt} . loose brushwork, vibrant color, light and shadow play, captures feeling over form", + "negative_prompt": "anime, photorealistic, 35mm film, deformed, glitch, low contrast, noisy" + }, + { + "name": "artstyle-pointillism", + "prompt": "pointillism style {prompt} . composed entirely of small, distinct dots of color, vibrant, highly detailed", + "negative_prompt": "line drawing, smooth shading, large color fields, simplistic" + }, + { + "name": "artstyle-pop art", + "prompt": "pop Art style {prompt} . 
bright colors, bold outlines, popular culture themes, ironic or kitsch", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, minimalist" + }, + { + "name": "artstyle-psychedelic", + "prompt": "psychedelic style {prompt} . vibrant colors, swirling patterns, abstract forms, surreal, trippy", + "negative_prompt": "monochrome, black and white, low contrast, realistic, photorealistic, plain, simple" + }, + { + "name": "artstyle-renaissance", + "prompt": "renaissance style {prompt} . realistic, perspective, light and shadow, religious or mythological themes, highly detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, modernist, minimalist, abstract" + }, + { + "name": "artstyle-steampunk", + "prompt": "steampunk style {prompt} . antique, mechanical, brass and copper tones, gears, intricate, detailed", + "negative_prompt": "deformed, glitch, noisy, low contrast, anime, photorealistic" + }, + { + "name": "artstyle-surrealist", + "prompt": "surrealist art {prompt} . dreamlike, mysterious, provocative, symbolic, intricate, detailed", + "negative_prompt": "anime, photorealistic, realistic, deformed, glitch, noisy, low contrast" + }, + { + "name": "artstyle-typography", + "prompt": "typographic art {prompt} . stylized, intricate, detailed, artistic, text-based", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic" + }, + { + "name": "artstyle-watercolor", + "prompt": "watercolor painting {prompt} . vibrant, beautiful, painterly, detailed, textural, artistic", + "negative_prompt": "anime, photorealistic, 35mm film, deformed, glitch, low contrast, noisy" + }, + { + "name": "futuristic-biomechanical", + "prompt": "biomechanical style {prompt} . blend of organic and mechanical elements, futuristic, cybernetic, detailed, intricate", + "negative_prompt": "natural, rustic, primitive, organic, simplistic" + }, + { + "name": "futuristic-biomechanical cyberpunk", + "prompt": "biomechanical cyberpunk {prompt} . cybernetics, human-machine fusion, dystopian, organic meets artificial, dark, intricate, highly detailed", + "negative_prompt": "natural, colorful, deformed, sketch, low contrast, watercolor" + }, + { + "name": "futuristic-cybernetic", + "prompt": "cybernetic style {prompt} . futuristic, technological, cybernetic enhancements, robotics, artificial intelligence themes", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, historical, medieval" + }, + { + "name": "futuristic-cybernetic robot", + "prompt": "cybernetic robot {prompt} . android, AI, machine, metal, wires, tech, futuristic, highly detailed", + "negative_prompt": "organic, natural, human, sketch, watercolor, low contrast" + }, + { + "name": "futuristic-cyberpunk cityscape", + "prompt": "cyberpunk cityscape {prompt} . neon lights, dark alleys, skyscrapers, futuristic, vibrant colors, high contrast, highly detailed", + "negative_prompt": "natural, rural, deformed, low contrast, black and white, sketch, watercolor" + }, + { + "name": "futuristic-futuristic", + "prompt": "futuristic style {prompt} . sleek, modern, ultramodern, high tech, detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, vintage, antique" + }, + { + "name": "futuristic-retro cyberpunk", + "prompt": "retro cyberpunk {prompt} . 
80's inspired, synthwave, neon, vibrant, detailed, retro futurism", + "negative_prompt": "modern, desaturated, black and white, realism, low contrast" + }, + { + "name": "futuristic-retro futurism", + "prompt": "retro-futuristic {prompt} . vintage sci-fi, 50s and 60s style, atomic age, vibrant, highly detailed", + "negative_prompt": "contemporary, realistic, rustic, primitive" + }, + { + "name": "futuristic-sci-fi", + "prompt": "sci-fi style {prompt} . futuristic, technological, alien worlds, space themes, advanced civilizations", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, historical, medieval" + }, + { + "name": "futuristic-vaporwave", + "prompt": "vaporwave style {prompt} . retro aesthetic, cyberpunk, vibrant, neon colors, vintage 80s and 90s style, highly detailed", + "negative_prompt": "monochrome, muted colors, realism, rustic, minimalist, dark" + }, + { + "name": "game-bubble bobble", + "prompt": "Bubble Bobble style {prompt} . 8-bit, cute, pixelated, fantasy, vibrant, reminiscent of Bubble Bobble game", + "negative_prompt": "realistic, modern, photorealistic, violent, horror" + }, + { + "name": "game-cyberpunk game", + "prompt": "cyberpunk game style {prompt} . neon, dystopian, futuristic, digital, vibrant, detailed, high contrast, reminiscent of cyberpunk genre video games", + "negative_prompt": "historical, natural, rustic, low detailed" + }, + { + "name": "game-fighting game", + "prompt": "fighting game style {prompt} . dynamic, vibrant, action-packed, detailed character design, reminiscent of fighting video games", + "negative_prompt": "peaceful, calm, minimalist, photorealistic" + }, + { + "name": "game-gta", + "prompt": "GTA-style artwork {prompt} . satirical, exaggerated, pop art style, vibrant colors, iconic characters, action-packed", + "negative_prompt": "realistic, black and white, low contrast, impressionist, cubist, noisy, blurry, deformed" + }, + { + "name": "game-mario", + "prompt": "Super Mario style {prompt} . vibrant, cute, cartoony, fantasy, playful, reminiscent of Super Mario series", + "negative_prompt": "realistic, modern, horror, dystopian, violent" + }, + { + "name": "game-minecraft", + "prompt": "Minecraft style {prompt} . blocky, pixelated, vibrant colors, recognizable characters and objects, game assets", + "negative_prompt": "smooth, realistic, detailed, photorealistic, noise, blurry, deformed" + }, + { + "name": "game-pokemon", + "prompt": "Pokémon style {prompt} . vibrant, cute, anime, fantasy, reminiscent of Pokémon series", + "negative_prompt": "realistic, modern, horror, dystopian, violent" + }, + { + "name": "game-retro arcade", + "prompt": "retro arcade style {prompt} . 8-bit, pixelated, vibrant, classic video game, old school gaming, reminiscent of 80s and 90s arcade games", + "negative_prompt": "modern, ultra-high resolution, photorealistic, 3D" + }, + { + "name": "game-retro game", + "prompt": "retro game art {prompt} . 16-bit, vibrant colors, pixelated, nostalgic, charming, fun", + "negative_prompt": "realistic, photorealistic, 35mm film, deformed, glitch, low contrast, noisy" + }, + { + "name": "game-rpg fantasy game", + "prompt": "role-playing game (RPG) style fantasy {prompt} . detailed, vibrant, immersive, reminiscent of high fantasy RPG games", + "negative_prompt": "sci-fi, modern, urban, futuristic, low detailed" + }, + { + "name": "game-strategy game", + "prompt": "strategy game style {prompt} . 
overhead view, detailed map, units, reminiscent of real-time strategy video games", + "negative_prompt": "first-person view, modern, photorealistic" + }, + { + "name": "game-streetfighter", + "prompt": "Street Fighter style {prompt} . vibrant, dynamic, arcade, 2D fighting game, highly detailed, reminiscent of Street Fighter series", + "negative_prompt": "3D, realistic, modern, photorealistic, turn-based strategy" + }, + { + "name": "game-zelda", + "prompt": "Legend of Zelda style {prompt} . vibrant, fantasy, detailed, epic, heroic, reminiscent of The Legend of Zelda series", + "negative_prompt": "sci-fi, modern, realistic, horror" + }, + { + "name": "misc-architectural", + "prompt": "architectural style {prompt} . clean lines, geometric shapes, minimalist, modern, architectural drawing, highly detailed", + "negative_prompt": "curved lines, ornate, baroque, abstract, grunge" + }, + { + "name": "misc-disco", + "prompt": "disco-themed {prompt} . vibrant, groovy, retro 70s style, shiny disco balls, neon lights, dance floor, highly detailed", + "negative_prompt": "minimalist, rustic, monochrome, contemporary, simplistic" + }, + { + "name": "misc-dreamscape", + "prompt": "dreamscape {prompt} . surreal, ethereal, dreamy, mysterious, fantasy, highly detailed", + "negative_prompt": "realistic, concrete, ordinary, mundane" + }, + { + "name": "misc-dystopian", + "prompt": "dystopian style {prompt} . bleak, post-apocalyptic, somber, dramatic, highly detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, cheerful, optimistic, vibrant, colorful" + }, + { + "name": "misc-fairy tale", + "prompt": "fairy tale {prompt} . magical, fantastical, enchanting, storybook style, highly detailed", + "negative_prompt": "realistic, modern, ordinary, mundane" + }, + { + "name": "misc-gothic", + "prompt": "gothic style {prompt} . dark, mysterious, haunting, dramatic, ornate, detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, cheerful, optimistic" + }, + { + "name": "misc-grunge", + "prompt": "grunge style {prompt} . textured, distressed, vintage, edgy, punk rock vibe, dirty, noisy", + "negative_prompt": "smooth, clean, minimalist, sleek, modern, photorealistic" + }, + { + "name": "misc-horror", + "prompt": "horror-themed {prompt} . eerie, unsettling, dark, spooky, suspenseful, grim, highly detailed", + "negative_prompt": "cheerful, bright, vibrant, light-hearted, cute" + }, + { + "name": "misc-kawaii", + "prompt": "kawaii style {prompt} . cute, adorable, brightly colored, cheerful, anime influence, highly detailed", + "negative_prompt": "dark, scary, realistic, monochrome, abstract" + }, + { + "name": "misc-lovecraftian", + "prompt": "lovecraftian horror {prompt} . eldritch, cosmic horror, unknown, mysterious, surreal, highly detailed", + "negative_prompt": "light-hearted, mundane, familiar, simplistic, realistic" + }, + { + "name": "misc-macabre", + "prompt": "macabre style {prompt} . dark, gothic, grim, haunting, highly detailed", + "negative_prompt": "bright, cheerful, light-hearted, cartoonish, cute" + }, + { + "name": "misc-manga", + "prompt": "manga style {prompt} . vibrant, high-energy, detailed, iconic, Japanese comic style", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, Western comic style" + }, + { + "name": "misc-metropolis", + "prompt": "metropolis-themed {prompt} . 
urban, cityscape, skyscrapers, modern, futuristic, highly detailed", + "negative_prompt": "rural, natural, rustic, historical, simple" + }, + { + "name": "misc-minimalist", + "prompt": "minimalist style {prompt} . simple, clean, uncluttered, modern, elegant", + "negative_prompt": "ornate, complicated, highly detailed, cluttered, disordered, messy, noisy" + }, + { + "name": "misc-monochrome", + "prompt": "monochrome {prompt} . black and white, contrast, tone, texture, detailed", + "negative_prompt": "colorful, vibrant, noisy, blurry, deformed" + }, + { + "name": "misc-nautical", + "prompt": "nautical-themed {prompt} . sea, ocean, ships, maritime, beach, marine life, highly detailed", + "negative_prompt": "landlocked, desert, mountains, urban, rustic" + }, + { + "name": "misc-space", + "prompt": "space-themed {prompt} . cosmic, celestial, stars, galaxies, nebulas, planets, science fiction, highly detailed", + "negative_prompt": "earthly, mundane, ground-based, realism" + }, + { + "name": "misc-stained glass", + "prompt": "stained glass style {prompt} . vibrant, beautiful, translucent, intricate, detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic" + }, + { + "name": "misc-techwear fashion", + "prompt": "techwear fashion {prompt} . futuristic, cyberpunk, urban, tactical, sleek, dark, highly detailed", + "negative_prompt": "vintage, rural, colorful, low contrast, realism, sketch, watercolor" + }, + { + "name": "misc-tribal", + "prompt": "tribal style {prompt} . indigenous, ethnic, traditional patterns, bold, natural colors, highly detailed", + "negative_prompt": "modern, futuristic, minimalist, pastel" + }, + { + "name": "misc-zentangle", + "prompt": "zentangle {prompt} . intricate, abstract, monochrome, patterns, meditative, highly detailed", + "negative_prompt": "colorful, representative, simplistic, large fields of color" + }, + { + "name": "papercraft-collage", + "prompt": "collage style {prompt} . mixed media, layered, textural, detailed, artistic", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic" + }, + { + "name": "papercraft-flat papercut", + "prompt": "flat papercut style {prompt} . silhouette, clean cuts, paper, sharp edges, minimalist, color block", + "negative_prompt": "3D, high detail, noise, grainy, blurry, painting, drawing, photo, disfigured" + }, + { + "name": "papercraft-kirigami", + "prompt": "kirigami representation of {prompt} . 3D, paper folding, paper cutting, Japanese, intricate, symmetrical, precision, clean lines", + "negative_prompt": "painting, drawing, 2D, noisy, blurry, deformed" + }, + { + "name": "papercraft-paper mache", + "prompt": "paper mache representation of {prompt} . 3D, sculptural, textured, handmade, vibrant, fun", + "negative_prompt": "2D, flat, photo, sketch, digital art, deformed, noisy, blurry" + }, + { + "name": "papercraft-paper quilling", + "prompt": "paper quilling art of {prompt} . intricate, delicate, curling, rolling, shaping, coiling, loops, 3D, dimensional, ornamental", + "negative_prompt": "photo, painting, drawing, 2D, flat, deformed, noisy, blurry" + }, + { + "name": "papercraft-papercut collage", + "prompt": "papercut collage of {prompt} . mixed media, textured paper, overlapping, asymmetrical, abstract, vibrant", + "negative_prompt": "photo, 3D, realistic, drawing, painting, high detail, disfigured" + }, + { + "name": "papercraft-papercut shadow box", + "prompt": "3D papercut shadow box of {prompt} . 
layered, dimensional, depth, silhouette, shadow, papercut, handmade, high contrast", + "negative_prompt": "painting, drawing, photo, 2D, flat, high detail, blurry, noisy, disfigured" + }, + { + "name": "papercraft-stacked papercut", + "prompt": "stacked papercut art of {prompt} . 3D, layered, dimensional, depth, precision cut, stacked layers, papercut, high contrast", + "negative_prompt": "2D, flat, noisy, blurry, painting, drawing, photo, deformed" + }, + { + "name": "papercraft-thick layered papercut", + "prompt": "thick layered papercut art of {prompt} . deep 3D, volumetric, dimensional, depth, thick paper, high stack, heavy texture, tangible layers", + "negative_prompt": "2D, flat, thin paper, low stack, smooth texture, painting, drawing, photo, deformed" + }, + { + "name": "photo-alien", + "prompt": "alien-themed {prompt} . extraterrestrial, cosmic, otherworldly, mysterious, sci-fi, highly detailed", + "negative_prompt": "earthly, mundane, common, realistic, simple" + }, + { + "name": "photo-film noir", + "prompt": "film noir style {prompt} . monochrome, high contrast, dramatic shadows, 1940s style, mysterious, cinematic", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, realism, photorealistic, vibrant, colorful" + }, + { + "name": "photo-glamour", + "prompt": "glamorous photo {prompt} . high fashion, luxurious, extravagant, stylish, sensual, opulent, elegance, stunning beauty, professional, high contrast, detailed", + "negative_prompt": "ugly, deformed, noisy, blurry, distorted, grainy, sketch, low contrast, dull, plain, modest" + }, + { + "name": "photo-hdr", + "prompt": "HDR photo of {prompt} . High dynamic range, vivid, rich details, clear shadows and highlights, realistic, intense, enhanced contrast, highly detailed", + "negative_prompt": "flat, low contrast, oversaturated, underexposed, overexposed, blurred, noisy" + }, + { + "name": "photo-iphone photographic", + "prompt": "iphone photo {prompt} . large depth of field, deep depth of field, highly detailed", + "negative_prompt": "drawing, painting, crayon, sketch, graphite, impressionist, noisy, blurry, soft, deformed, ugly, shallow depth of field, bokeh" + }, + { + "name": "photo-long exposure", + "prompt": "long exposure photo of {prompt} . Blurred motion, streaks of light, surreal, dreamy, ghosting effect, highly detailed", + "negative_prompt": "static, noisy, deformed, shaky, abrupt, flat, low contrast" + }, + { + "name": "photo-neon noir", + "prompt": "neon noir {prompt} . cyberpunk, dark, rainy streets, neon signs, high contrast, low light, vibrant, highly detailed", + "negative_prompt": "bright, sunny, daytime, low contrast, black and white, sketch, watercolor" + }, + { + "name": "photo-silhouette", + "prompt": "silhouette style {prompt} . high contrast, minimalistic, black and white, stark, dramatic", + "negative_prompt": "ugly, deformed, noisy, blurry, low contrast, color, realism, photorealistic" + }, + { + "name": "photo-tilt-shift", + "prompt": "tilt-shift photo of {prompt} . 
selective focus, miniature effect, blurred background, highly detailed, vibrant, perspective control", + "negative_prompt": "blurry, noisy, deformed, flat, low contrast, unrealistic, oversaturated, underexposed" + } +] \ No newline at end of file diff --git a/shared.py b/shared.py new file mode 100644 index 0000000000000000000000000000000000000000..21a2a864bb2e139cc12a336cebcec2fbf8bfd36b --- /dev/null +++ b/shared.py @@ -0,0 +1 @@ +gradio_root = None \ No newline at end of file diff --git a/sorted_styles.json b/sorted_styles.json new file mode 100644 index 0000000000000000000000000000000000000000..b806c7ee831e6fc393b93c6755bcb3f2ed27a58a --- /dev/null +++ b/sorted_styles.json @@ -0,0 +1,278 @@ +[ + "Fooocus V2", + "Fooocus Enhance", + "Fooocus Sharp", + "Fooocus Masterpiece", + "Fooocus Negative", + "SAI 3D Model", + "Fooocus Photograph", + "Fooocus Cinematic", + "SAI Analog Film", + "SAI Anime", + "SAI Comic Book", + "SAI Cinematic", + "SAI Craft Clay", + "SAI Digital Art", + "SAI Fantasy Art", + "SAI Lowpoly", + "SAI Isometric", + "SAI Enhance", + "SAI Neonpunk", + "SAI Origami", + "SAI Line Art", + "SAI Pixel Art", + "SAI Photographic", + "SAI Texture", + "MRE Cinematic Dynamic", + "MRE Artistic Vision", + "MRE Spontaneous Picture", + "MRE Dark Dream", + "MRE Gloomy Art", + "MRE Underground", + "MRE Bad Dream", + "MRE Surreal Painting", + "MRE Elemental Art", + "MRE Undead Art", + "MRE Dynamic Illustration", + "MRE Heroic Fantasy", + "MRE Ancient Illustration", + "MRE Space Art", + "MRE Brave Art", + "MRE Sumi E Symbolic", + "MRE Manga", + "MRE Anime", + "MRE Lyrical Geometry", + "MRE Dark Cyberpunk", + "MRE Sumi E Detailed", + "MRE Comic", + "Ads Automotive", + "Ads Fashion Editorial", + "Ads Corporate", + "Ads Advertising", + "Ads Food Photography", + "Ads Gourmet Food Photography", + "Ads Retail", + "Ads Luxury", + "Ads Real Estate", + "Artstyle Abstract", + "Artstyle Abstract Expressionism", + "Artstyle Art Deco", + "Artstyle Art Nouveau", + "Artstyle Cubist", + "Artstyle Constructivist", + "Artstyle Expressionist", + "Artstyle Graffiti", + "Artstyle Impressionist", + "Artstyle Pop Art", + "Artstyle Hyperrealism", + "Artstyle Pointillism", + "Artstyle Psychedelic", + "Artstyle Renaissance", + "Artstyle Surrealist", + "Artstyle Watercolor", + "Artstyle Typography", + "Artstyle Steampunk", + "Futuristic Biomechanical", + "Futuristic Biomechanical Cyberpunk", + "Futuristic Cybernetic", + "Futuristic Cybernetic Robot", + "Futuristic Cyberpunk Cityscape", + "Futuristic Futuristic", + "Futuristic Retro Futurism", + "Futuristic Vaporwave", + "Game Cyberpunk Game", + "Game Fighting Game", + "Game Bubble Bobble", + "Futuristic Sci Fi", + "Futuristic Retro Cyberpunk", + "Game Mario", + "Game Gta", + "Game Minecraft", + "Game Pokemon", + "Game Retro Game", + "Game Strategy Game", + "Game Rpg Fantasy Game", + "Game Retro Arcade", + "Game Streetfighter", + "Game Zelda", + "Misc Disco", + "Misc Architectural", + "Misc Dreamscape", + "Misc Fairy Tale", + "Misc Grunge", + "Misc Lovecraftian", + "Misc Manga", + "Misc Nautical", + "Misc Stained Glass", + "Misc Techwear Fashion", + "Misc Space", + "Misc Monochrome", + "Misc Metropolis", + "Misc Macabre", + "Misc Kawaii", + "Misc Minimalist", + "Misc Gothic", + "Misc Dystopian", + "Misc Horror", + "Misc Tribal", + "Misc Zentangle", + "Papercraft Flat Papercut", + "Papercraft Paper Mache", + "Papercraft Collage", + "Papercraft Kirigami", + "Papercraft Paper Quilling", + "Papercraft Papercut Shadow Box", + "Papercraft Stacked Papercut", + "Papercraft 
Thick Layered Papercut", + "Photo Alien", + "Photo Film Noir", + "Photo Hdr", + "Photo Glamour", + "Photo Iphone Photographic", + "Papercraft Papercut Collage", + "Photo Long Exposure", + "Photo Neon Noir", + "Photo Tilt Shift", + "Photo Silhouette", + "Cinematic Diva", + "Abstract Expressionism", + "Academia", + "Adorable 3D Character", + "Adorable Kawaii", + "Action Figure", + "Astral Aura", + "Art Nouveau", + "Baroque", + "Avant Garde", + "Art Deco", + "Bauhaus Style Poster", + "Blueprint Schematic Drawing", + "Caricature", + "Character Design Sheet", + "Cel Shaded Art", + "Classicism Art", + "Color Field Painting", + "Conceptual Art", + "Colored Pencil Art", + "Constructivism", + "Cubism", + "Dadaism", + "Dark Moody Atmosphere", + "Dark Fantasy", + "Dmt Art Style", + "Doodle Art", + "Dripping Paint Splatter Art", + "Double Exposure", + "Expressionism", + "Faded Polaroid Photo", + "Fortnite Art Style", + "Fauvism", + "Flat 2d Art", + "Glo Fi", + "Glitchcore", + "Futurism", + "Googie Art Style", + "Graffiti Art", + "Harlem Renaissance Art", + "Impressionism", + "Idyllic", + "Infographic Drawing", + "Ink Dripping Drawing", + "Knolling Photography", + "Japanese Ink Drawing", + "Light Cheery Atmosphere", + "Logo Design", + "Luxurious Elegance", + "Macro Photography", + "Marker Drawing", + "Mandola Art", + "Neo Byzantine", + "Minimalism", + "Neo Futurism", + "Neo Baroque", + "Medievalism", + "High Fashion", + "Neoclassicism", + "Neo Impressionism", + "Neo Rococo", + "Op Art", + "Pencil Sketch Drawing", + "Silhouette Art", + "Ornate And Intricate", + "Pop Art 2", + "Simple Vector Art", + "Rococo", + "Steampunk 2", + "Surrealism", + "Terragen", + "Sketchup", + "Suprematism", + "Sticker Designs", + "Volumetric Lighting", + "Whimsical And Playful", + "Watercolor 2", + "Vibrant Rim Light", + "Tranquil Relaxing Atmosphere", + "Mk Chromolithography", + "Mk Cross Processing Print", + "Mk Dufaycolor Photograph", + "Mk Punk Collage", + "Mk Herbarium", + "Mk Mosaic", + "Mk Coloring Book", + "Mk Van Gogh", + "Mk Pollock", + "Mk Basquiat", + "Mk Singer Sargent", + "Mk Halftone Print", + "Mk Andy Warhol", + "Mk Gond Painting", + "Mk Albumen Print", + "Mk Anthotype Print", + "Mk Aquatint Print", + "Mk Inuit Carving", + "Mk Bromoil Print", + "Mk Color Sketchnote", + "Mk Calotype Print", + "Mk Alcohol Ink Art", + "Mk Blacklight Paint", + "Mk One Line Art", + "Mk Cibulak Porcelain", + "Mk Cross Stitching", + "Mk Encaustic Paint", + "Mk Carnival Glass", + "Mk Cyanotype Print", + "Mk Embroidery", + "Mk Luminogram", + "Mk Suminagashi", + "Mk Mokume Gane", + "Mk Palekh", + "Mk Gyotaku", + "Mk Lite Brite Art", + "Pebble Art", + "Mk Scrimshaw", + "Mk Shibori", + "Mk Ukiyo E", + "Mk Vitreous Enamel", + "Mk Vintage Airline Poster", + "Mk Vintage Travel Poster", + "Mk Bauhaus Style", + "Mk Atompunk", + "Mk Afrofuturism", + "Mk Chicano Art", + "Mk Dayak Art", + "Mk Constructivism", + "Mk De Stijl", + "Mk Fayum Portrait", + "Mk Illuminated Manuscript", + "Mk Madhubani Painting", + "Mk Pichwai Painting", + "Mk Kalighat Painting", + "Mk Pictorialism", + "Mk Patachitra Painting", + "Mk Samoan Art Inspired", + "Mk Adnate Style", + "Mk Tlingit Art", + "Mk Ron English Style", + "Mk Shepard Fairey Style" +] \ No newline at end of file diff --git a/vercel.json b/vercel.json new file mode 100644 index 0000000000000000000000000000000000000000..c4a7a2e594d2e565cec861d1341ec967a2c89b59 --- /dev/null +++ b/vercel.json @@ -0,0 +1,14 @@ +{ + "builds": [ + { + "src": "app.py", + "use": "@vercel/python" + } + ], + "routes": [ + { + 
"src": "./", + "dest": "app.py" + } + ] +} diff --git a/webui.py b/webui.py new file mode 100644 index 0000000000000000000000000000000000000000..c2c449d46bb5b0d148be7b4a600cf816765df373 --- /dev/null +++ b/webui.py @@ -0,0 +1,688 @@ +import gradio as gr +import random +import os +import json +import time +import shared +import modules.config +import fooocus_version +import modules.html +import modules.async_worker as worker +import modules.constants as constants +import modules.flags as flags +import modules.gradio_hijack as grh +import modules.style_sorter as style_sorter +import modules.meta_parser +import args_manager +import copy + +from modules.sdxl_styles import legal_style_names +from modules.private_logger import get_current_html_path +from modules.ui_gradio_extensions import reload_javascript +from modules.auth import auth_enabled, check_auth +from modules.util import is_json + +def get_task(*args): + args = list(args) + args.pop(0) + + return worker.AsyncTask(args=args) + +def generate_clicked(task): + import ldm_patched.modules.model_management as model_management + + with model_management.interrupt_processing_mutex: + model_management.interrupt_processing = False + # outputs=[progress_html, progress_window, progress_gallery, gallery] + execution_start_time = time.perf_counter() + finished = False + + yield gr.update(visible=True, value=modules.html.make_progress_html(1, 'Waiting for task to start ...')), \ + gr.update(visible=True, value=None), \ + gr.update(visible=False, value=None), \ + gr.update(visible=False) + + worker.async_tasks.append(task) + + while not finished: + time.sleep(0.01) + if len(task.yields) > 0: + flag, product = task.yields.pop(0) + if flag == 'preview': + + # help bad internet connection by skipping duplicated preview + if len(task.yields) > 0: # if we have the next item + if task.yields[0][0] == 'preview': # if the next item is also a preview + # print('Skipped one preview for better internet connection.') + continue + + percentage, title, image = product + yield gr.update(visible=True, value=modules.html.make_progress_html(percentage, title)), \ + gr.update(visible=True, value=image) if image is not None else gr.update(), \ + gr.update(), \ + gr.update(visible=False) + if flag == 'results': + yield gr.update(visible=True), \ + gr.update(visible=True), \ + gr.update(visible=True, value=product), \ + gr.update(visible=False) + if flag == 'finish': + yield gr.update(visible=False), \ + gr.update(visible=False), \ + gr.update(visible=False), \ + gr.update(visible=True, value=product) + finished = True + + # delete Fooocus temp images, only keep gradio temp images + if args_manager.args.disable_image_log: + for filepath in product: + if isinstance(filepath, str) and os.path.exists(filepath): + os.remove(filepath) + + execution_time = time.perf_counter() - execution_start_time + print(f'Total time: {execution_time:.2f} seconds') + return + + +reload_javascript() + +title = f'Fooocus {fooocus_version.version}' + +if isinstance(args_manager.args.preset, str): + title += ' ' + args_manager.args.preset + +shared.gradio_root = gr.Blocks( + title=title, + css=modules.html.css).queue() + +with shared.gradio_root: + currentTask = gr.State(worker.AsyncTask(args=[])) + with gr.Row(): + with gr.Column(scale=2): + with gr.Row(): + progress_window = grh.Image(label='Preview', show_label=True, visible=False, height=768, + elem_classes=['main_view']) + progress_gallery = gr.Gallery(label='Finished Images', show_label=True, object_fit='contain', + height=768, 
visible=False, elem_classes=['main_view', 'image_gallery']) + progress_html = gr.HTML(value=modules.html.make_progress_html(32, 'Progress 32%'), visible=False, + elem_id='progress-bar', elem_classes='progress-bar') + gallery = gr.Gallery(label='Gallery', show_label=False, object_fit='contain', visible=True, height=768, + elem_classes=['resizable_area', 'main_view', 'final_gallery', 'image_gallery'], + elem_id='final_gallery') + with gr.Row(elem_classes='type_row'): + with gr.Column(scale=17): + prompt = gr.Textbox(show_label=False, placeholder="Type prompt here or paste parameters.", elem_id='positive_prompt', + container=False, autofocus=True, elem_classes='type_row', lines=1024) + + default_prompt = modules.config.default_prompt + if isinstance(default_prompt, str) and default_prompt != '': + shared.gradio_root.load(lambda: default_prompt, outputs=prompt) + + with gr.Column(scale=3, min_width=0): + generate_button = gr.Button(label="Generate", value="Generate", elem_classes='type_row', elem_id='generate_button', visible=True) + load_parameter_button = gr.Button(label="Load Parameters", value="Load Parameters", elem_classes='type_row', elem_id='load_parameter_button', visible=False) + skip_button = gr.Button(label="Skip", value="Skip", elem_classes='type_row_half', visible=False) + stop_button = gr.Button(label="Stop", value="Stop", elem_classes='type_row_half', elem_id='stop_button', visible=False) + + def stop_clicked(currentTask): + import ldm_patched.modules.model_management as model_management + currentTask.last_stop = 'stop' + if (currentTask.processing): + model_management.interrupt_current_processing() + return currentTask + + def skip_clicked(currentTask): + import ldm_patched.modules.model_management as model_management + currentTask.last_stop = 'skip' + if (currentTask.processing): + model_management.interrupt_current_processing() + return currentTask + + stop_button.click(stop_clicked, inputs=currentTask, outputs=currentTask, queue=False, show_progress=False, _js='cancelGenerateForever') + skip_button.click(skip_clicked, inputs=currentTask, outputs=currentTask, queue=False, show_progress=False) + with gr.Row(elem_classes='advanced_check_row'): + input_image_checkbox = gr.Checkbox(label='Input Image', value=False, container=False, elem_classes='min_check') + advanced_checkbox = gr.Checkbox(label='Advanced', value=modules.config.default_advanced_checkbox, container=False, elem_classes='min_check') + with gr.Row(visible=False) as image_input_panel: + with gr.Tabs(): + with gr.TabItem(label='Upscale or Variation') as uov_tab: + with gr.Row(): + with gr.Column(): + uov_input_image = grh.Image(label='Drag above image to here', source='upload', type='numpy') + with gr.Column(): + uov_method = gr.Radio(label='Upscale or Variation:', choices=flags.uov_list, value=flags.disabled) + gr.HTML('\U0001F4D4 Document') + with gr.TabItem(label='Image Prompt') as ip_tab: + with gr.Row(): + ip_images = [] + ip_types = [] + ip_stops = [] + ip_weights = [] + ip_ctrls = [] + ip_ad_cols = [] + for _ in range(flags.controlnet_image_count): + with gr.Column(): + ip_image = grh.Image(label='Image', source='upload', type='numpy', show_label=False, height=300) + ip_images.append(ip_image) + ip_ctrls.append(ip_image) + with gr.Column(visible=False) as ad_col: + with gr.Row(): + default_end, default_weight = flags.default_parameters[flags.default_ip] + + ip_stop = gr.Slider(label='Stop At', minimum=0.0, maximum=1.0, step=0.001, value=default_end) + ip_stops.append(ip_stop) + ip_ctrls.append(ip_stop) + + 
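# Per-slot ControlNet controls: 'Stop At' and 'Weight' take their defaults from
+                            # flags.default_parameters for the default ip type, and the ip_type.change
+                            # handler below resets both sliders whenever the type is switched.
+ 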
ip_weight = gr.Slider(label='Weight', minimum=0.0, maximum=2.0, step=0.001, value=default_weight) + ip_weights.append(ip_weight) + ip_ctrls.append(ip_weight) + + ip_type = gr.Radio(label='Type', choices=flags.ip_list, value=flags.default_ip, container=False) + ip_types.append(ip_type) + ip_ctrls.append(ip_type) + + ip_type.change(lambda x: flags.default_parameters[x], inputs=[ip_type], outputs=[ip_stop, ip_weight], queue=False, show_progress=False) + ip_ad_cols.append(ad_col) + ip_advanced = gr.Checkbox(label='Advanced', value=False, container=False) + gr.HTML('* \"Image Prompt\" is powered by Fooocus Image Mixture Engine (v1.0.1). \U0001F4D4 Document') + + def ip_advance_checked(x): + return [gr.update(visible=x)] * len(ip_ad_cols) + \ + [flags.default_ip] * len(ip_types) + \ + [flags.default_parameters[flags.default_ip][0]] * len(ip_stops) + \ + [flags.default_parameters[flags.default_ip][1]] * len(ip_weights) + + ip_advanced.change(ip_advance_checked, inputs=ip_advanced, + outputs=ip_ad_cols + ip_types + ip_stops + ip_weights, + queue=False, show_progress=False) + with gr.TabItem(label='Inpaint or Outpaint') as inpaint_tab: + with gr.Row(): + inpaint_input_image = grh.Image(label='Drag inpaint or outpaint image to here', source='upload', type='numpy', tool='sketch', height=500, brush_color="#FFFFFF", elem_id='inpaint_canvas') + inpaint_mask_image = grh.Image(label='Mask Upload', source='upload', type='numpy', height=500, visible=False) + + with gr.Row(): + inpaint_additional_prompt = gr.Textbox(placeholder="Describe what you want to inpaint.", elem_id='inpaint_additional_prompt', label='Inpaint Additional Prompt', visible=False) + outpaint_selections = gr.CheckboxGroup(choices=['Left', 'Right', 'Top', 'Bottom'], value=[], label='Outpaint Direction') + inpaint_mode = gr.Dropdown(choices=modules.flags.inpaint_options, value=modules.flags.inpaint_option_default, label='Method') + example_inpaint_prompts = gr.Dataset(samples=modules.config.example_inpaint_prompts, label='Additional Prompt Quick List', components=[inpaint_additional_prompt], visible=False) + gr.HTML('* Powered by Fooocus Inpaint Engine \U0001F4D4 Document') + example_inpaint_prompts.click(lambda x: x[0], inputs=example_inpaint_prompts, outputs=inpaint_additional_prompt, show_progress=False, queue=False) + with gr.TabItem(label='Describe') as desc_tab: + with gr.Row(): + with gr.Column(): + desc_input_image = grh.Image(label='Drag any image to here', source='upload', type='numpy') + with gr.Column(): + desc_method = gr.Radio( + label='Content Type', + choices=[flags.desc_type_photo, flags.desc_type_anime], + value=flags.desc_type_photo) + desc_btn = gr.Button(value='Describe this Image into Prompt') + gr.HTML('\U0001F4D4 Document') + with gr.TabItem(label='Metadata') as load_tab: + with gr.Column(): + metadata_input_image = grh.Image(label='Drag any image generated by Fooocus here', source='upload', type='filepath') + metadata_json = gr.JSON(label='Metadata') + metadata_import_button = gr.Button(value='Apply Metadata') + + def trigger_metadata_preview(filepath): + parameters, metadata_scheme = modules.meta_parser.read_info_from_image(filepath) + + results = {} + if parameters is not None: + results['parameters'] = parameters + + if isinstance(metadata_scheme, flags.MetadataScheme): + results['metadata_scheme'] = metadata_scheme.value + + return results + + metadata_input_image.upload(trigger_metadata_preview, inputs=metadata_input_image, + outputs=metadata_json, queue=False, show_progress=True) + + switch_js = "(x) => 
{if(x){viewer_to_bottom(100);viewer_to_bottom(500);}else{viewer_to_top();} return x;}" + down_js = "() => {viewer_to_bottom();}" + + input_image_checkbox.change(lambda x: gr.update(visible=x), inputs=input_image_checkbox, + outputs=image_input_panel, queue=False, show_progress=False, _js=switch_js) + ip_advanced.change(lambda: None, queue=False, show_progress=False, _js=down_js) + + current_tab = gr.Textbox(value='uov', visible=False) + uov_tab.select(lambda: 'uov', outputs=current_tab, queue=False, _js=down_js, show_progress=False) + inpaint_tab.select(lambda: 'inpaint', outputs=current_tab, queue=False, _js=down_js, show_progress=False) + ip_tab.select(lambda: 'ip', outputs=current_tab, queue=False, _js=down_js, show_progress=False) + desc_tab.select(lambda: 'desc', outputs=current_tab, queue=False, _js=down_js, show_progress=False) + + with gr.Column(scale=1, visible=modules.config.default_advanced_checkbox) as advanced_column: + with gr.Tab(label='Setting'): + performance_selection = gr.Radio(label='Performance', + choices=modules.flags.performance_selections, + value=modules.config.default_performance) + aspect_ratios_selection = gr.Radio(label='Aspect Ratios', choices=modules.config.available_aspect_ratios, + value=modules.config.default_aspect_ratio, info='width × height', + elem_classes='aspect_ratios') + image_number = gr.Slider(label='Image Number', minimum=1, maximum=modules.config.default_max_image_number, step=1, value=modules.config.default_image_number) + + output_format = gr.Radio(label='Output Format', + choices=modules.flags.output_formats, + value=modules.config.default_output_format) + + negative_prompt = gr.Textbox(label='Negative Prompt', show_label=True, placeholder="Type prompt here.", + info='Describing what you do not want to see.', lines=2, + elem_id='negative_prompt', + value=modules.config.default_prompt_negative) + seed_random = gr.Checkbox(label='Random', value=True) + image_seed = gr.Textbox(label='Seed', value=0, max_lines=1, visible=False) # workaround for https://github.com/gradio-app/gradio/issues/5354 + + def random_checked(r): + return gr.update(visible=not r) + + def refresh_seed(r, seed_string): + if r: + return random.randint(constants.MIN_SEED, constants.MAX_SEED) + else: + try: + seed_value = int(seed_string) + if constants.MIN_SEED <= seed_value <= constants.MAX_SEED: + return seed_value + except ValueError: + pass + return random.randint(constants.MIN_SEED, constants.MAX_SEED) + + seed_random.change(random_checked, inputs=[seed_random], outputs=[image_seed], + queue=False, show_progress=False) + + def update_history_link(): + if args_manager.args.disable_image_log: + return gr.update(value='') + + return gr.update(value=f'\U0001F4DA History Log') + + history_link = gr.HTML() + shared.gradio_root.load(update_history_link, outputs=history_link, queue=False, show_progress=False) + + with gr.Tab(label='Style'): + style_sorter.try_load_sorted_styles( + style_names=legal_style_names, + default_selected=modules.config.default_styles) + + style_search_bar = gr.Textbox(show_label=False, container=False, + placeholder="\U0001F50E Type here to search styles ...", + value="", + label='Search Styles') + style_selections = gr.CheckboxGroup(show_label=False, container=False, + choices=copy.deepcopy(style_sorter.all_styles), + value=copy.deepcopy(modules.config.default_styles), + label='Selected Styles', + elem_classes=['style_selections']) + gradio_receiver_style_selections = gr.Textbox(elem_id='gradio_receiver_style_selections', visible=False) + + 
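# gradio_receiver_style_selections is a hidden receiver textbox (note the elem_id):
+                # it is presumably populated by the frontend drag-and-drop JS, and its .input
+                # event below re-sorts the style checkboxes via style_sorter.sort_styles.
+ 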
shared.gradio_root.load(lambda: gr.update(choices=copy.deepcopy(style_sorter.all_styles)),
+                                        outputs=style_selections)
+
+                style_search_bar.change(style_sorter.search_styles,
+                                        inputs=[style_selections, style_search_bar],
+                                        outputs=style_selections,
+                                        queue=False,
+                                        show_progress=False).then(
+                    lambda: None, _js='()=>{refresh_style_localization();}')
+
+                gradio_receiver_style_selections.input(style_sorter.sort_styles,
+                                                       inputs=style_selections,
+                                                       outputs=style_selections,
+                                                       queue=False,
+                                                       show_progress=False).then(
+                    lambda: None, _js='()=>{refresh_style_localization();}')
+
+            with gr.Tab(label='Model'):
+                with gr.Group():
+                    with gr.Row():
+                        base_model = gr.Dropdown(label='Base Model (SDXL only)', choices=modules.config.model_filenames, value=modules.config.default_base_model_name, show_label=True)
+                        refiner_model = gr.Dropdown(label='Refiner (SDXL or SD 1.5)', choices=['None'] + modules.config.model_filenames, value=modules.config.default_refiner_model_name, show_label=True)
+
+                    refiner_switch = gr.Slider(label='Refiner Switch At', minimum=0.1, maximum=1.0, step=0.0001,
+                                               info='Use 0.4 for SD1.5 realistic models; '
+                                                    'or 0.667 for SD1.5 anime models; '
+                                                    'or 0.8 for XL-refiners; '
+                                                    'or any value for switching two SDXL models.',
+                                               value=modules.config.default_refiner_switch,
+                                               visible=modules.config.default_refiner_model_name != 'None')
+
+                    refiner_model.change(lambda x: gr.update(visible=x != 'None'),
+                                         inputs=refiner_model, outputs=refiner_switch, show_progress=False, queue=False)
+
+                with gr.Group():
+                    lora_ctrls = []
+
+                    for i, (n, v) in enumerate(modules.config.default_loras):
+                        with gr.Row():
+                            lora_enabled = gr.Checkbox(label='Enable', value=True,
+                                                       elem_classes=['lora_enable', 'min_check'], scale=1)
+                            lora_model = gr.Dropdown(label=f'LoRA {i + 1}',
+                                                     choices=['None'] + modules.config.lora_filenames, value=n,
+                                                     elem_classes='lora_model', scale=5)
+                            lora_weight = gr.Slider(label='Weight', minimum=modules.config.default_loras_min_weight,
+                                                    maximum=modules.config.default_loras_max_weight, step=0.01, value=v,
+                                                    elem_classes='lora_weight', scale=5)
+                            lora_ctrls += [lora_enabled, lora_model, lora_weight]
+
+                with gr.Row():
+                    model_refresh = gr.Button(label='Refresh', value='\U0001f504 Refresh All Files', variant='secondary', elem_classes='refresh_button')
+            with gr.Tab(label='Advanced'):
+                guidance_scale = gr.Slider(label='Guidance Scale', minimum=1.0, maximum=30.0, step=0.01,
+                                           value=modules.config.default_cfg_scale,
+                                           info='Higher value means style is cleaner, more vivid, and more artistic.')
+                sharpness = gr.Slider(label='Image Sharpness', minimum=0.0, maximum=30.0, step=0.001,
+                                      value=modules.config.default_sample_sharpness,
+                                      info='Higher value means image and texture are sharper.')
+                gr.HTML('\U0001F4D4 Document')
+                dev_mode = gr.Checkbox(label='Developer Debug Mode', value=True, container=True)
+
+                with gr.Column(visible=False) as dev_tools:
+                    with gr.Tab(label='Debug Tools'):
+                        adm_scaler_positive = gr.Slider(label='Positive ADM Guidance Scaler', minimum=0.1, maximum=3.0,
+                                                        step=0.001, value=1.5, info='The scale factor applied to positive ADM (use 1.0 to disable). ')
+                        adm_scaler_negative = gr.Slider(label='Negative ADM Guidance Scaler', minimum=0.1, maximum=3.0,
+                                                        step=0.001, value=0.8, info='The scale factor applied to negative ADM (use 1.0 to disable). ')
+                        adm_scaler_end = gr.Slider(label='ADM Guidance End At Step', minimum=0.0, maximum=1.0,
+                                                   step=0.001, value=0.3,
+                                                   info='When to end the guidance from positive/negative ADM. ')
+
+                        refiner_swap_method = gr.Dropdown(label='Refiner swap method', value=flags.refiner_swap_method,
+                                                          choices=['joint', 'separate', 'vae'])
+
+                        adaptive_cfg = gr.Slider(label='CFG Mimicking from TSNR', minimum=1.0, maximum=30.0, step=0.01,
+                                                 value=modules.config.default_cfg_tsnr,
+                                                 info='Enables Fooocus\'s implementation of CFG mimicking for TSNR '
+                                                      '(effective when real CFG > mimicked CFG).')
+                        sampler_name = gr.Dropdown(label='Sampler', choices=flags.sampler_list,
+                                                   value=modules.config.default_sampler)
+                        scheduler_name = gr.Dropdown(label='Scheduler', choices=flags.scheduler_list,
+                                                     value=modules.config.default_scheduler)
+
+                        generate_image_grid = gr.Checkbox(label='Generate Image Grid for Each Batch',
+                                                          info='(Experimental) This may cause performance problems on some computers or under certain internet conditions.',
+                                                          value=False)
+
+                        overwrite_step = gr.Slider(label='Forced Overwrite of Sampling Step',
+                                                   minimum=-1, maximum=200, step=1,
+                                                   value=modules.config.default_overwrite_step,
+                                                   info='Set to -1 to disable. For developer debugging.')
+                        overwrite_switch = gr.Slider(label='Forced Overwrite of Refiner Switch Step',
+                                                     minimum=-1, maximum=200, step=1,
+                                                     value=modules.config.default_overwrite_switch,
+                                                     info='Set to -1 to disable. For developer debugging.')
+                        overwrite_width = gr.Slider(label='Forced Overwrite of Generating Width',
+                                                    minimum=-1, maximum=2048, step=1, value=-1,
+                                                    info='Set to -1 to disable. For developer debugging. '
+                                                         'Results will be worse for non-standard numbers that SDXL is not trained on.')
+                        overwrite_height = gr.Slider(label='Forced Overwrite of Generating Height',
+                                                     minimum=-1, maximum=2048, step=1, value=-1,
+                                                     info='Set to -1 to disable. For developer debugging. '
+                                                          'Results will be worse for non-standard numbers that SDXL is not trained on.')
+                        overwrite_vary_strength = gr.Slider(label='Forced Overwrite of Denoising Strength of "Vary"',
+                                                            minimum=-1, maximum=1.0, step=0.001, value=-1,
+                                                            info='Set to a negative number to disable. For developer debugging.')
+                        overwrite_upscale_strength = gr.Slider(label='Forced Overwrite of Denoising Strength of "Upscale"',
+                                                               minimum=-1, maximum=1.0, step=0.001, value=-1,
+                                                               info='Set to a negative number to disable. For developer debugging.')
+                        disable_preview = gr.Checkbox(label='Disable Preview', value=False,
+                                                      info='Disable preview during generation.')
+                        disable_intermediate_results = gr.Checkbox(label='Disable Intermediate Results',
+                                                                   value=modules.config.default_performance == 'Extreme Speed',
+                                                                   interactive=modules.config.default_performance != 'Extreme Speed',
+                                                                   info='Disable intermediate results during generation; only show the final gallery.')
+                        disable_seed_increment = gr.Checkbox(label='Disable seed increment',
+                                                             info='Disable automatic seed increment when image number is > 1.',
+                                                             value=False)
+
+                        if not args_manager.args.disable_metadata:
+                            save_metadata_to_images = gr.Checkbox(label='Save Metadata to Images', value=modules.config.default_save_metadata_to_images,
+                                                                  info='Adds parameters to generated images allowing manual regeneration.')
+                            metadata_scheme = gr.Radio(label='Metadata Scheme', choices=flags.metadata_scheme, value=modules.config.default_metadata_scheme,
+                                                       info='Image Prompt parameters are not included. Use png and a1111 for compatibility with Civitai.',
+                                                       visible=modules.config.default_save_metadata_to_images)
+
+                            save_metadata_to_images.change(lambda x: gr.update(visible=x), inputs=[save_metadata_to_images], outputs=[metadata_scheme],
+                                                           queue=False, show_progress=False)
+
+                    with gr.Tab(label='Control'):
+                        debugging_cn_preprocessor = gr.Checkbox(label='Debug Preprocessors', value=True,
+                                                                info='See the results from preprocessors.')
+                        skipping_cn_preprocessor = gr.Checkbox(label='Skip Preprocessors', value=True,
+                                                               info='Do not preprocess images. (Inputs are already canny/depth/cropped-face/etc.)')
+
+                        mixing_image_prompt_and_vary_upscale = gr.Checkbox(label='Mixing Image Prompt and Vary/Upscale',
+                                                                           value=True)
+                        mixing_image_prompt_and_inpaint = gr.Checkbox(label='Mixing Image Prompt and Inpaint',
+                                                                      value=True)
+
+                        controlnet_softness = gr.Slider(label='Softness of ControlNet', minimum=0.0, maximum=1.0,
+                                                        step=0.001, value=0.25,
+                                                        info='Similar to the Control Mode in A1111 (use 0.0 to disable). ')
+
+                    with gr.Tab(label='Canny'):
+                        canny_low_threshold = gr.Slider(label='Canny Low Threshold', minimum=1, maximum=255,
+                                                        step=1, value=64)
+                        canny_high_threshold = gr.Slider(label='Canny High Threshold', minimum=1, maximum=255,
+                                                         step=1, value=128)
+
+                    with gr.Tab(label='Inpaint'):
+                        debugging_inpaint_preprocessor = gr.Checkbox(label='Debug Inpaint Preprocessing', value=True)
+                        inpaint_disable_initial_latent = gr.Checkbox(label='Disable initial latent in inpaint', value=False)
+                        inpaint_engine = gr.Dropdown(label='Inpaint Engine',
+                                                     value=modules.config.default_inpaint_engine_version,
+                                                     choices=flags.inpaint_engine_versions,
+                                                     info='Version of Fooocus inpaint model')
+                        inpaint_strength = gr.Slider(label='Inpaint Denoising Strength',
+                                                     minimum=0.0, maximum=1.0, step=0.001, value=1.0,
+                                                     info='Same as the denoising strength in A1111 inpaint. '
+                                                          'Only used in inpaint, not used in outpaint. '
+                                                          '(Outpaint always uses 1.0)')
+                        inpaint_respective_field = gr.Slider(label='Inpaint Respective Field',
+                                                             minimum=0.0, maximum=1.0, step=0.001, value=0.618,
+                                                             info='The area to inpaint. '
+                                                                  'Value 0 is the same as "Only Masked" in A1111. '
+                                                                  'Value 1 is the same as "Whole Image" in A1111. '
+                                                                  'Only used in inpaint, not used in outpaint. '
+                                                                  '(Outpaint always uses 1.0)')
+                        inpaint_erode_or_dilate = gr.Slider(label='Mask Erode or Dilate',
+                                                            minimum=-64, maximum=64, step=1, value=0,
+                                                            info='Positive value will make white area in the mask larger, '
+                                                                 'negative value will make white area smaller. '
+ '(default is 0, always process before any mask invert)') + inpaint_mask_upload_checkbox = gr.Checkbox(label='Enable Mask Upload', value=True) + invert_mask_checkbox = gr.Checkbox(label='Invert Mask', value=False) + + inpaint_ctrls = [debugging_inpaint_preprocessor, inpaint_disable_initial_latent, inpaint_engine, + inpaint_strength, inpaint_respective_field, + inpaint_mask_upload_checkbox, invert_mask_checkbox, inpaint_erode_or_dilate] + + inpaint_mask_upload_checkbox.change(lambda x: gr.update(visible=x), + inputs=inpaint_mask_upload_checkbox, + outputs=inpaint_mask_image, queue=False, show_progress=False) + + with gr.Tab(label='FreeU'): + freeu_enabled = gr.Checkbox(label='Enabled', value=False) + freeu_b1 = gr.Slider(label='B1', minimum=0, maximum=2, step=0.01, value=1.01) + freeu_b2 = gr.Slider(label='B2', minimum=0, maximum=2, step=0.01, value=1.02) + freeu_s1 = gr.Slider(label='S1', minimum=0, maximum=4, step=0.01, value=0.99) + freeu_s2 = gr.Slider(label='S2', minimum=0, maximum=4, step=0.01, value=0.95) + freeu_ctrls = [freeu_enabled, freeu_b1, freeu_b2, freeu_s1, freeu_s2] + + def dev_mode_checked(r): + return gr.update(visible=r) + + + dev_mode.change(dev_mode_checked, inputs=[dev_mode], outputs=[dev_tools], + queue=False, show_progress=False) + + def model_refresh_clicked(): + modules.config.update_all_model_names() + results = [gr.update(choices=modules.config.model_filenames)] + results += [gr.update(choices=['None'] + modules.config.model_filenames)] + for i in range(modules.config.default_max_lora_number): + results += [gr.update(interactive=True), gr.update(choices=['None'] + modules.config.lora_filenames), gr.update()] + return results + + model_refresh.click(model_refresh_clicked, [], [base_model, refiner_model] + lora_ctrls, + queue=False, show_progress=False) + + performance_selection.change(lambda x: [gr.update(interactive=x != 'Extreme Speed')] * 11 + + [gr.update(visible=x != 'Extreme Speed')] * 1 + + [gr.update(interactive=x != 'Extreme Speed', value=x == 'Extreme Speed', )] * 1, + inputs=performance_selection, + outputs=[ + guidance_scale, sharpness, adm_scaler_end, adm_scaler_positive, + adm_scaler_negative, refiner_switch, refiner_model, sampler_name, + scheduler_name, adaptive_cfg, refiner_swap_method, negative_prompt, disable_intermediate_results + ], queue=False, show_progress=False) + + output_format.input(lambda x: gr.update(output_format=x), inputs=output_format) + + advanced_checkbox.change(lambda x: gr.update(visible=x), advanced_checkbox, advanced_column, + queue=False, show_progress=False) \ + .then(fn=lambda: None, _js='refresh_grid_delayed', queue=False, show_progress=False) + + def inpaint_mode_change(mode): + assert mode in modules.flags.inpaint_options + + # inpaint_additional_prompt, outpaint_selections, example_inpaint_prompts, + # inpaint_disable_initial_latent, inpaint_engine, + # inpaint_strength, inpaint_respective_field + + if mode == modules.flags.inpaint_option_detail: + return [ + gr.update(visible=True), gr.update(visible=False, value=[]), + gr.Dataset.update(visible=True, samples=modules.config.example_inpaint_prompts), + False, 'None', 0.5, 0.0 + ] + + if mode == modules.flags.inpaint_option_modify: + return [ + gr.update(visible=True), gr.update(visible=False, value=[]), + gr.Dataset.update(visible=False, samples=modules.config.example_inpaint_prompts), + True, modules.config.default_inpaint_engine_version, 1.0, 0.0 + ] + + return [ + gr.update(visible=False, value=''), gr.update(visible=True), + gr.Dataset.update(visible=False, 
samples=modules.config.example_inpaint_prompts), + False, modules.config.default_inpaint_engine_version, 1.0, 0.618 + ] + + inpaint_mode.input(inpaint_mode_change, inputs=inpaint_mode, outputs=[ + inpaint_additional_prompt, outpaint_selections, example_inpaint_prompts, + inpaint_disable_initial_latent, inpaint_engine, + inpaint_strength, inpaint_respective_field + ], show_progress=False, queue=False) + + ctrls = [currentTask, generate_image_grid] + ctrls += [ + prompt, negative_prompt, style_selections, + performance_selection, aspect_ratios_selection, image_number, output_format, image_seed, sharpness, guidance_scale + ] + + ctrls += [base_model, refiner_model, refiner_switch] + lora_ctrls + ctrls += [input_image_checkbox, current_tab] + ctrls += [uov_method, uov_input_image] + ctrls += [outpaint_selections, inpaint_input_image, inpaint_additional_prompt, inpaint_mask_image] + ctrls += [disable_preview, disable_intermediate_results, disable_seed_increment] + ctrls += [adm_scaler_positive, adm_scaler_negative, adm_scaler_end, adaptive_cfg] + ctrls += [sampler_name, scheduler_name] + ctrls += [overwrite_step, overwrite_switch, overwrite_width, overwrite_height, overwrite_vary_strength] + ctrls += [overwrite_upscale_strength, mixing_image_prompt_and_vary_upscale, mixing_image_prompt_and_inpaint] + ctrls += [debugging_cn_preprocessor, skipping_cn_preprocessor, canny_low_threshold, canny_high_threshold] + ctrls += [refiner_swap_method, controlnet_softness] + ctrls += freeu_ctrls + ctrls += inpaint_ctrls + + if not args_manager.args.disable_metadata: + ctrls += [save_metadata_to_images, metadata_scheme] + + ctrls += ip_ctrls + + state_is_generating = gr.State(False) + + def parse_meta(raw_prompt_txt, is_generating): + loaded_json = None + if is_json(raw_prompt_txt): + loaded_json = json.loads(raw_prompt_txt) + + if loaded_json is None: + if is_generating: + return gr.update(), gr.update(), gr.update() + else: + return gr.update(), gr.update(visible=True), gr.update(visible=False) + + return json.dumps(loaded_json), gr.update(visible=False), gr.update(visible=True) + + prompt.input(parse_meta, inputs=[prompt, state_is_generating], outputs=[prompt, generate_button, load_parameter_button], queue=False, show_progress=False) + + load_data_outputs = [advanced_checkbox, image_number, prompt, negative_prompt, style_selections, + performance_selection, overwrite_step, overwrite_switch, aspect_ratios_selection, + overwrite_width, overwrite_height, guidance_scale, sharpness, adm_scaler_positive, + adm_scaler_negative, adm_scaler_end, refiner_swap_method, adaptive_cfg, base_model, + refiner_model, refiner_switch, sampler_name, scheduler_name, seed_random, image_seed, + generate_button, load_parameter_button] + freeu_ctrls + lora_ctrls + + load_parameter_button.click(modules.meta_parser.load_parameter_button_click, inputs=[prompt, state_is_generating], outputs=load_data_outputs, queue=False, show_progress=False) + + def trigger_metadata_import(filepath, state_is_generating): + parameters, metadata_scheme = modules.meta_parser.read_info_from_image(filepath) + if parameters is None: + print('Could not find metadata in the image!') + parsed_parameters = {} + else: + metadata_parser = modules.meta_parser.get_metadata_parser(metadata_scheme) + parsed_parameters = metadata_parser.parse_json(parameters) + + return modules.meta_parser.load_parameter_button_click(parsed_parameters, state_is_generating) + + + metadata_import_button.click(trigger_metadata_import, inputs=[metadata_input_image, state_is_generating], 
outputs=load_data_outputs, queue=False, show_progress=True) \ + .then(style_sorter.sort_styles, inputs=style_selections, outputs=style_selections, queue=False, show_progress=False) + + generate_button.click(lambda: (gr.update(visible=True, interactive=True), gr.update(visible=True, interactive=True), gr.update(visible=False, interactive=False), [], True), + outputs=[stop_button, skip_button, generate_button, gallery, state_is_generating]) \ + .then(fn=refresh_seed, inputs=[seed_random, image_seed], outputs=image_seed) \ + .then(fn=get_task, inputs=ctrls, outputs=currentTask) \ + .then(fn=generate_clicked, inputs=currentTask, outputs=[progress_html, progress_window, progress_gallery, gallery]) \ + .then(lambda: (gr.update(visible=True, interactive=True), gr.update(visible=False, interactive=False), gr.update(visible=False, interactive=False), False), + outputs=[generate_button, stop_button, skip_button, state_is_generating]) \ + .then(fn=update_history_link, outputs=history_link) \ + .then(fn=lambda: None, _js='playNotification').then(fn=lambda: None, _js='refresh_grid_delayed') + + for notification_file in ['notification.ogg', 'notification.mp3']: + if os.path.exists(notification_file): + gr.Audio(interactive=False, value=notification_file, elem_id='audio_notification', visible=False) + break + + def trigger_describe(mode, img): + if mode == flags.desc_type_photo: + from extras.interrogate import default_interrogator as default_interrogator_photo + return default_interrogator_photo(img), ["Fooocus V2", "Fooocus Enhance", "Fooocus Sharp"] + if mode == flags.desc_type_anime: + from extras.wd14tagger import default_interrogator as default_interrogator_anime + return default_interrogator_anime(img), ["Fooocus V2", "Fooocus Masterpiece"] + return mode, ["Fooocus V2"] + + desc_btn.click(trigger_describe, inputs=[desc_method, desc_input_image], + outputs=[prompt, style_selections], show_progress=True, queue=True) + + +def dump_default_english_config(): + from modules.localization import dump_english_config + dump_english_config(grh.all_components) + + +# dump_default_english_config() + +shared.gradio_root.launch( + inbrowser=args_manager.args.in_browser, + server_name=args_manager.args.listen, + server_port=args_manager.args.port, + share=args_manager.args.share, + auth=check_auth if (args_manager.args.share or args_manager.args.listen) and auth_enabled else None, + allowed_paths=[modules.config.path_outputs], + blocked_paths=[constants.AUTH_FILENAME] +) diff --git a/wildcards/animal.txt b/wildcards/animal.txt new file mode 100644 index 0000000000000000000000000000000000000000..3c479daa0969d10f4a77b133d0fe94024e4f02a9 --- /dev/null +++ b/wildcards/animal.txt @@ -0,0 +1,100 @@ +Alligator +Ant +Antelope +Armadillo +Badger +Bat +Bear +Beaver +Bison +Boar +Bobcat +Bull +Camel +Chameleon +Cheetah +Chicken +Chihuahua +Chimpanzee +Chinchilla +Chipmunk +Komodo Dragon +Cow +Coyote +Crocodile +Crow +Deer +Dinosaur +Dolphin +Donkey +Duck +Eagle +Eel +Elephant +Elk +Emu +Falcon +Ferret +Flamingo +Flying Squirrel +Giraffe +Goose +Guinea pig +Hawk +Hedgehog +Hippopotamus +Horse +Hummingbird +Hyena +Jackal +Jaguar +Jellyfish +Kangaroo +King Cobra +Koala bear +Leopard +Lion +Lizard +Magpie +Marten +Meerkat +Mole +Monkey +Moose +Mouse +Octopus +Okapi +Orangutan +Ostrich +Otter +Owl +Panda +Pangolin +Panther +Penguin +Pig +Porcupine +Possum +Puma +Quokka +Rabbit +Raccoon +Raven +Reindeer +Rhinoceros +Seal +Shark +Sheep +Snail +Snake +Sparrow +Spider +Squirrel +Swallow +Tiger +Walrus +Whale +Wolf +Wombat +Yak 
+Zebra diff --git a/wildcards/artist.txt b/wildcards/artist.txt new file mode 100644 index 0000000000000000000000000000000000000000..be64c2d3d0d795af40ef694cf85be80a90996975 --- /dev/null +++ b/wildcards/artist.txt @@ -0,0 +1,2765 @@ +A. J. Casson +Aaron Douglas +Aaron Horkey +Aaron Jasinski +Aaron Siskind +Abbott Fuller Graves +Abbott Handerson Thayer +Abigail Larson +Abraham Pether +Abram Efimovich Arkhipov +Adam Elsheimer +Adam Hughes +Adam Martinakis +Adi Granov +Adolf Hirémy-Hirschl +Adolph Gottlieb +Adolph Menzel +Adonna Khare +Adriaen van Ostade +Adriaen van Utrecht +Adrian Donoghue +Adrian Ghenie +Adrian Smith +Adrian Tomine +Adrianus Eversen +Afarin Sajedi +Agnes Cecile +Agnes Lawrence Pelton +Agnes Martin +Agostino Arrivabene +Akihiko Yoshida +Akira Toriyama +Akos Major +Al Capp +Al Feldstein +Al Williamson +Alain Laboile +Alan Bean +Alan Kenny +Alan Lee +Alan Moore +Alan Parry +Alasdair McLellan +Albert Benois +Albert Bierstadt +Albert Dubois-Pillet +Albert Edelfelt +Albert Gleizes +Albert Goodwin +Albert Joseph Moore +Albert Lynch +Albert Marquet +Albert Pinkham Ryder +Alberto Biasi +Alberto Burri +Alberto Giacometti +Alberto Seveso +Alberto Sughi +Albrecht Anker +Albrecht Dürer +Alec Soth +Alejandro Burdisio +Aleksey Savrasov +Aleksi Briclot +Alena Aenami +Alessandro Allori +Alessandro Gottardo +Alessio Albi +Alex Alemany +Alex Andreev +Alex Colville +Alex Garant +Alex Grey +Alex Gross +Alex Hirsch +Alex Horley +Alex Howitt +Alex Katz +Alex Maleev +Alex Prager +Alex Ross +Alex Russell Flint +Alex Timmermans +Alex Toth +Alexander Archipenko +Alexander Jansson +Alexander McQueen +Alexander Millar +Alexandr Averin +Alexandre Benois +Alexandre Cabanel +Alexandre Calame +Alexandre-Évariste Fragonard +Alexej von Jawlensky +Alfred Augustus Glendening +Alfred Cheney Johnston +Alfred Eisenstaedt +Alfred Guillou +Alfred Kubin +Alfred Munnings +Alfred Parsons +Alfred Sisley +Alfred Stevens +Alfredo Jaar +Algernon Blackwood +Alice Bailly +Alice Neel +Alice Pasquini +Alice Rahon +Alison Bechdel +Aliza Razell +Allie Brosh +Alma Woodsey Thomas +Alois Arnegger +Alphonse Mucha +Alphonse Osbert +Alson Skinner Clark +Alvar Aalto +Álvaro Siza +Alvin Langdon Coburn +Alyssa Monks +Amanda Clark +Amanda Sage +Amandine van Ray +Ambrosius Benson +Ambrosius Bosschaert +Amedeo Modigliani +Amy Earles +Amy Judd +Amy Sillman +Anato Finnstark +Anatoly Metlan +Anders Zorn +Ando Fuchs +Andre de Dienes +Andre Derain +André Kertész +Andre Kohn +André Lhote +André Masson +Andre Norton +Andre-Charles Boulle +Andrea Kowch +Andrea Mantegna +Andreas Achenbach +Andreas Franke +Andreas Levers +Andreas Rocha +Andreas Vesalius +Andrei Markin +Andrew Atroshenko +Andrew Ferez +Andrew Macara +Andrew Robinson +Andrew Wyeth +Andrey Remnev +Android Jones +Andy Fairhurst +Andy Goldsworthy +Andy Kehoe +Andy Singer +Andy Warhol +Angela Barrett +Angelina Wrona +Angus McKie +Anja Millen +Anja Percival +Anka Zhuravleva +Ann Stookey +Anna Ancher +Anna Bocek +Anna Dittmann +Anne Bachelier +Anne Brigman +Anne Dewailly +Anne Geddes +Anne McCaffrey +Anne Packard +Anne Rothenstein +Anne Stokes +Anne Truitt +Anne-Louis Girodet +Anni Albers +Annibale Carracci +Annick Bouvattier +Annie Leibovitz +Annie Soudain +Annie Swynnerton +Ansel Adams +Anselm Kiefer +Antanas Sutkus +Anthony Thieme +Anthony van Dyck +Antoine Blanchard +Anton Fadeev +Anton Mauve +Anton Otto Fischer +Anton Pieck +Anton Raphael Mengs +Anton Semenov +Antonello da Messina +Antoni Gaudí +Antonio Canova +Antonio J. 
Manzanedo +Antonio Mancini +Antonio Mora +Antony Gormley +Apollinary Vasnetsov +Apollonia Saintclair +Aquirax Uno +Archibald Thorburn +Aries Moross +Aristarkh Lentulov +Aristide Maillol +Armand Guillaumin +Armand Point +Arnold Böcklin +Aron Demetz +Aron Wiesenfeld +Arshile Gorky +Art Frahm +Art Spiegelman +Artemisia Gentileschi +Arthur Adams +Arthur Boyd +Arthur Dove +Arthur Elgort +Arthur Hacker +Arthur Hughes +Arthur Lismer +Arthur Rackham +Arthur Sarnoff +Arthur Stanley Wilkinson +Arthur Streeton +Arthur Tress +Arthur Wardle +Artur Bordalo +Arturo Souto +Ary Scheffer +Asaf Hanuka +Asger Jorn +Asher Brown Durand +Ashley Wood +Atelier Olschinsky +Atey Ghailan +Aubrey Beardsley +Audrey Kawasaki +August Friedrich Schenck +August Macke +August Sander +Auguste Herbin +Auguste Toulmouche +Augustus Edwin Mulready +Augustus John +Austin Briggs +Austin Osman Spare +Ayami Kojima +Barbara Hepworth +Barbara Kruger +Barbara Stauffacher Solomon +Barbara Takenaga +Barclay Shaw +Barkley L. Hendricks +Barnett Newman +Barry McGee +Barry Windsor Smith +Bart Sears +Bartolomé Esteban Murillo +Basil Gogos +Bastien Lecouffe-Deharme +Bayard Wu +Beatrix Potter +Beauford Delaney +Becky Cloonan +Bella Kotak +Ben Aronson +Ben Goossens +Ben Hatke +Ben Nicholson +Ben Quilty +Ben Shahn +Ben Templesmith +Benedick Bana +Benoit B. Mandelbrot +Bernard Buffet +Bernardo Bellotto +Bernardo Strozzi +Berndnaut Smilde +Bernie Wrightson +Bert Hardy +Bert Stern +Berthe Morisot +Bertil Nilsson +Beth Conklin +Bettina Rheims +Bhupen Khakhar +Bill Brandt +Bill Brauer +Bill Carman +Bill Durgin +Bill Gekas +Bill Henson +Bill Jacklin +Bill Medcalf +Bill Sienkiewicz +Bill Traylor +Bill Viola +Bill Ward +Bill Watterson +Billy Childish +Bjarke Ingels +Bo Bartlett +Bo Chen +Bob Byerley +Bob Clampett +Bob Eggleton +Bob Ross +Bojan Jevtic +Boris Grigoriev +Boris Groh +Boris Kustodiev +Boris Vallejo +Brad Kunkle +Brad Rigney +Brandon Mably +Brandon Woelfel +Brent Heighton +Brett Weston +Brett Whiteley +Brian Bolland +Brian Despain +Brian Froud +Brian K. Vaughan +Brian Kesinger +Brian M. Viveros +Brian Mashburn +Brian Stelfreeze +Brian Sum +Briana Mora +Brice Marden +Bridget Bate Tichenor +Bridget Riley +Briton Rivière +Brooke DiDonato +Brooke Shaden +Brothers Grimm +Brothers Hildebrandt +Bruce Davidson +Bruce Munro +Bruce Nauman +Bruce Pennington +Bruce Timm +Bruno Catalano +Bruno Munari +Bruno Walpoth +Bryan Hitch +Buckminster Fuller +Burt Glinn +Butcher Billy +C. R. W. Nevinson +Cai Guo-Qiang +Caia Koopman +Callie Fink +Camilla d'Errico +Camille Claudel +Camille Corot +Camille Pissarro +Camille Walala +Candido Portinari +Caras Ionut +Carel Willink +Carl Barks +Carl Gustav Carus +Carl Holsoe +Carl Kleiner +Carl Larsson +Carl Moll +Carl Spitzweg +Carlo Crivelli +Carlo Dolci +Carlo Scarpa +Carlos Cruz-Diez +Carlos Schwabe +Carne Griffiths +Carolina Herrera +Carolyn Blish +Carrie Ann Baade +Carrie Graber +Carrie Mae Weems +Carsten Holler +Carsten Meyerdierks +Casey Baugh +Casey Childs +Casey Weldon +Caspar David Friedrich +Catherine Hyde +Catherine Nolin +Cathy Wilkes +Catrin Welz-Stein +Cecil Beaton +Cecilia Beaux +Cecily Brown +Cedric Seaut +Cerith Wyn Evans +CFA Voysey +Chad Knight +Chaïm Soutine +Chantal Joffe +Charles Addams +Charles Angrand +Charles Blackman +Charles Burns +Charles Camoin +Charles Courtney Curran +Charles Demuth +Charles Dwyer +Charles E. 
Burchfield +Charles Ginner +Charles Gwathmey +Charles Le Brun +Charles Maurice Detmold +Charles Mellin +Charles Reiffel +Charles Rennie Mackintosh +Charles Robinson +Charles Schulz +Charles Sheeler +Charles Spencelayh +Charles Tunnicliffe +Charles Vess +Charles Willson Peale +Charles Wysocki +Charles-Amable Lenoir +Charles-Francois Daubigny +Charley Harper +Charlie Bowater +Charline von Heyl +Chen Zhen +Chesley Bonestell +Chiara Bautista +Chie Yoshii +Chiharu Shiota +Chiho Aoshima +Childe Hassam +Chip Zdarsky +Chris Bachalo +Chris Claremont +Chris Cunningham +Chris Dyer +Chris Foss +Chris Friel +Chris Leib +Chris Mars +Chris Moore +Chris Ofili +Chris Riddell +Chris Samnee +Chris Uminga +Chris Van Allsburg +Chris Ware +Christian Boltanski +Christian Schad +Christian Schloe +Christine Ellger +Christoffer Relander +Christophe Jacrot +Christophe Staelens +Christophe Vacher +Christopher Balaskas +Christopher Ryan McKenney +Christopher Wool +Chuck Close +Chung Shek +Cicely Mary Barker +Cildo Meireles +Cindy Sherman +Claes Oldenburg +Clara Peeters +Clarence Gagnon +Claude Cahun +Claude Lorrain +Claude Monet +Claudia Tremblay +Clay Mann +Clayton Crain +Clemens Ascher +Cleon Peterson +Cliff Chiang +Clifford Coffin +Clint Langley +Clive Barker +Clive Madgwick +Clyde Caldwell +Clyfford Still +Coby Whitmore +Coles Phillips +Colin Campbell Cooper +Conrad Roset +Conrad Shawcross +Constant Permeke +Constantin Brancusi +Constantin Joffe +Cornelia Parker +Cornelis Springer +Cory Arcangel +Cory Loftis +Craig Davison +Craig McCracken +Craig Mullins +Craigie Aitchison +Cuno Amiet +Cyril Rolando +Daan Roosegaarde +Daido Moriyama +Dain Yoon +Dale Chihuly +Damien Hirst +Dan Colen +Dan Flavin +Dan Hillier +Dan Matutina +Dan McPharlin +Dan Mumford +Dan Piraro +Dan Witz +Dana Schutz +Danh Võ +Daniel Arsham +Daniel Buren +Daniel Clowes +Daniel F. Gerhartz +Daniel Garber +Daniel Jaems +Daniel Libeskind +Daniel Merriam +Daniel Ridgway Knight +Daniela Uhlig +Danny Lyon +Danny Roberts +Dante Gabriel Rossetti +Daria Endresen +Daria Petrilli +Dariusz Klimczak +Darwyn Cooke +Dave Coverly +Dave Dorman +Dave Gibbons +Dave McKean +Dave Stevens +David A. Hardy +David Aja +David Alfaro Siqueiros +David B. Mattingly +David Bailey +David Bates +David Bomberg +David Brayne +David Brown Milne +David Burdeny +David Burliuk +David Chipperfield +David Choe +David Downton +David Driskell +David Finch +David Goldblatt +David Hammons +David Hettinger +David Hockney +David Inshaw +David LaChapelle +David Ligare +David Lynch +David Mann +David Michael Bowers +David Mould +David Nordahl +David Palumbo +David Plowden +David Renshaw +David Sims +David Spriggs +David Teniers the Younger +David Tindle +David Tutwiler +David Walker +David Welker +David Wiesner +David Wojnarowicz +David Yarrow +Davide Sorrenti +Dean Cornwell +Dean Ellis +Debbie Criswell +Debbie Fleming Caffery +Deborah Azzopardi +Deborah Turbeville +Dee Nickerson +Deirdre Sullivan-Beeman +Delphin Enjolras +Denis Sarazhin +Dennis Stock +Denys Lasdun +Derek Gores +Desmond Morris +Diane Arbus +Didier Lourenço +Diego Dayer +Diego Rivera +Diego Velázquez +Dima Dmitriev +Dimitra Milan +Dimitry Roulland +Dino Valls +Dmitri Danish +Dmitry Kustanovich +Dmitry Spiros +Do Ho Suh +Dod Procter +Don Bergland +Don Blanding +Don Bluth +Don Lawrence +Don Maitz +Donald Judd +Donato Giancola +Donna Huanca +Dora Carrington +Dora Maar +Dorina Costras +Dorothea Lange +Dorothea Sharp +Dorothea Tanning +Dorothy Johnstone +Dorothy Lathrop +Doug Aitken +Doug Hyde +Douglas Smith +Dr. 
Seuss +Drew Struzan +Duffy Sheridan +Dustin Nguyen +Duy Huynh +E. H. Shepard +Eadweard Muybridge +Earl Norem +Eastman Johnson +Ebru Sidar +Ed Binkley +Ed Brubaker +Ed Emshwiller +Ed Freeman +Ed Mell +Ed Piskor +Ed Roth +Eddie Campbell +Eddie Colla +Eddie Mendoza +Edgar Degas +Edgar Maxence +Edmondo Senatore +Edmund Dulac +Edmund Leighton +Edmund Tarbell +Edoardo Tresoldi +Édouard Manet +Édouard Vuillard +Eduard Gaertner +Eduard Veith +Edvard Munch +Edward Atkinson Hornel +Edward Bawden +Edward Blair Wilkins +Edward Burne-Jones +Edward Cucuel +Edward Gorey +Edward Henry Potthast +Edward Hopper +Edward Julius Detmold +Edward Lear +Edward Moran +Edward Okuń +Edward Poynter +Edward Robert Hughes +Edward Seago +Edward Steichen +Edward Wadsworth +Edward Weston +Edwin Austin Abbey +Edwin Henry Landseer +Edwin Lord Weeks +Eero Saarinen +Egon Schiele +Eiichiro Oda +Eiko Ojala +Eileen Agar +Eileen Gray +Eilif Peterssen +El Anatsui +El Lissitzky +Elaine de Kooning +Eleanor Fortescue-Brickdale +Eleanor Vere Boyle +Elena Paraskeva +Elihu Vedder +Elisabeth Sonrel +Élisabeth Vigée Le Brun +Elizabeth Catlett +Elizabeth Gadd +Elke Vogelsang +Ellen Jewett +Ellen von Unwerth +Elliott Erwitt +Ellsworth Kelly +Elly Smallwood +Elsa Beskow +Elsa Bleda +Emek Golan +Emerico Imre Toth +Emil Alzamora +Emil Carlsen +Emil Ferris +Emil Melmoth +Emil Nolde +Émile Bernard +Emile Claus +Émile Gallé +Emilia Wilk +Emiliano Ponzi +Emily Balivet +Emily Carr +Emily Kame Kngwarreye +Emma Geary +Emma Ríos +Emmanuelle Moureaux +Enki Bilal +Enoch Bolles +Ephraim Moses Lilien +Eric Canete +Eric Carle +Eric Fischl +Eric Ravilious +Eric Wallis +Eric Zener +Erica Hopper +Erich Heckel +Erik Johansson +Erik Jones +Erin Hanson +Ernest Lawson +Ernest Meissonier +Ernest Zacharevic +Ernesto Neto +Ernie Barnes +Ernst Barlach +Ernst Fuchs +Ernst Haas +Ernst Haeckel +Ernst Ludwig Kirchner +Ernst Wilhelm Nay +Erwin Blumenfeld +Esaias van de Velde +Esao Andrews +Etam Cru +Etel Adnan +Ethan Van Sciver +Étienne Adolphe Piot +Ettore Sottsass +Ettore Tito +Euan Uglow +Eugène Atget +Eugène Boudin +Eugene Delacroix +Eugene Galien-Laloue +Eugène Girardet +Eugène Giraud +Eugène Grasset +Eugene von Guerard +Eustache Le Sueur +Eva Rothschild +Eve Arnold +Eve Ventrue +Evelyn De Morgan +Evelyn Dunbar +Everett Shinn +Evgeni Gordiets +Evgeny Lushpin +Eyvind Earle +Ezra Stoller +Fabian Perez +Fabio Hurtado +Fairfield Porter +Faith Ringgold +Fan Ho +Fang Lijun +Farel Dalrymple +Fatima Ronquillo +Fay Godwin +Felice Casorati +Felicia Simion +Felicien Rops +Felipe Pantone +Felix Gonzalez-Torres +Felix Vallotton +Fenghua Zhong +Ferdinand Hodler +Ferdinand Keller +Ferdinand Knab +Ferenc Pinter +Fern Isabel Coppedge +Fernand Cormon +Fernand Fonssagrives +Fernand Khnopff +Fernand Leger +Fernand Toussaint +Fernando Amorsolo +Fernando Botero +Ferris Plock +Filip Hodas +Filippino Lippi +Filippo Brunelleschi +Fintan Magee +Firmin Baes +Fletcher Sibthorp +Flora Borsi +Florence Harrison +Florian Nicolle +Floris Arntzenius +Ford Madox Brown +Fra Angelico +Frances MacDonald +Francesca Woodman +Francesco Borromini +Francesco Clemente +Francesco Guardi +Francesco Hayez +Francesco Solimena +Francine Van Hove +Francis Bacon +Francis Coates Jones +Francis Davis Millet +Francis Picabia +Francisco de Zurbaran +Francisco Goya +Franco Fontana +François Boucher +Francois Schuiten +Frank Auerbach +Frank Bramley +Frank Cadogan Cowper +Frank Cho +Frank Frazetta +Frank Gehry +Frank Holl +Frank Lloyd Wright +Frank McCarthy +Frank Miller +Frank Montague Moore +Frank Quitely +Frank Stella 
+Frank Thorne +Frank Weston Benson +Frank Xavier Leyendecker +Franklin Booth +Franklin Carmichael +Frans Floris +Frans Francken the Younger +Frans Hals +Frans Snyders +Frantisek Kupka +Franz Kline +Franz Marc +Franz Sedlacek +Franz Stuck +Franz West +Franz Xaver Winterhalter +Fred Calleri +Fred Stein +Fred Tomaselli +Frederic Bazille +Frederic Edwin Church +Frederic Remington +Frederick Arthur Bridgman +Frederick Carl Frieseke +Frederick Cayley Robinson +Frederick Goodall +Frederick Judd Waugh +Frederick Lord Leighton +Frederick McCubbin +Frederick Sandys +Frida Kahlo +Friedensreich Regentag Dunkelbunt Hundertwasser +Frieke Janssens +Frits Thaulow +Frits Van den Berghe +Fritz Henle +Fritz Scholder +Gabriel Dawe +Gabriel Metsu +Gabriel Pacheco +Gabriel von Max +Gabriele Dell'otto +Gabriele Münter +Gabriele Viertel +Gaetano Pesce +Gail Potocki +Gail Simone +Gareth Pugh +Gari Melchers +Garry Winogrand +Gary Baseman +Gary Bunt +Gary Hume +Gary Larson +Gary Panter +Gaston Bussière +Gaston Lachaise +Gediminas Pranckevicius +Gemma Correll +Gene Luen Yang +Genndy Tartakovsky +Geof Darrow +Geof Kern +Geoff Johns +Georg Baselitz +Georg Jensen +George Ault +George Barbier +George Bellows +George Birrell +George Caleb Bingham +George Callaghan +George Catlin +George Christakis +George Clausen +George Condo +George Cruikshank +George Digalakis +George Elgar Hicks +George Frederic Watts +George Goodwin Kilburne +George Grosz +George Henry Boughton +George Herriman +George Hillyard Swinstead +George Hurrell +George Inness +George Lucas +George Luks +George Morland +George Pemba +George Perez +George Platt Lynes +George Romney +George Segal +George Stefanescu +George Stubbs +George Tice +George Tooker +George Underwood +Georges Braque +Georges Clairin +Georges de La Tour +Georges Lacombe +Georges Ribemont-Dessaignes +Georges Rouault +Georges Rousse +Georges Seurat +Georgia O'Keeffe +Georgy Kurasov +Gerald Brom +Gerald Harvey Jones +Gerard David +Gerard ter Borch +Gerard van Honthorst +Gerd Arntz +Gerda Taro +Gerda Wegener +Gerhard Gluck +Gerhard Munthe +Gerhard Richter +Germaine Krull +Gertrude Abercrombie +Gertrude Käsebier +Ghada Amer +Giacomo Balla +Gian Lorenzo Bernini +Gianni Strino +Gifford Beal +Gil Elvgren +Gilbert Garcin +Gilbert Williams +Gino Severini +Giorgio Barbarelli da Castelfranco +Giorgio de Chirico +Giorgio Morandi +Giorgio Vasari +Giovanni Battista Gaulli +Giovanni Battista Piranesi +Giovanni Battista Tiepolo +Giovanni Battista Venanzi +Giovanni Boldini +Giovanni Domenico Tiepolo +Giovanni Segantini +Giuseppe Arcimboldo +Giuseppe de Nittis +Gjon Mili +Glen Keane +Glenn Fabry +Go Nagai +Godfrey Kneller +Godfried Schalcken +Gordon Parks +Goro Fujita +Gottfried Helnwein +Govaert Flinck +Grace Cossington Smith +Graham Sutherland +Grandma Moses +Grant Morrison +Grant Wood +Granville Redmond +Grayson Perry +Greg Girard +Greg Hildebrandt +Greg Land +Greg Olsen +Greg Rucka +Greg Rutkowski +Greg Simkins +Greg Tocchini +Grégoire Guillemin +Gregory Colbert +Gregory Crewdson +Grete Stern +Grigory Gluckmann +Gris Grimly +Guido Borelli da Caluso +Guido Crepax +Guido Reni +Guido van Helten +Guillaume Seignac +Guillem H. Pongiluppi +Guo Pei +Gustav Klimt +Gustave Baumann +Gustave Buchet +Gustave Caillebotte +Gustave Courbet +Gustave Doré +Gustave Loiseau +Gustave Moreau +Gustave Van de Woestijne +Guy Aroch +Guy Billout +Guy Carleton Wiggins +Guy Denning +Guy Rose +Gwen John +Gwenda Morgan +György Kepes +H. R. (Hans Ruedi) Giger +H.P. 
Lovecraft +Hajime Sorayama +Hal Foster +Hale Woodruff +Hannah Hoch +Hans Andersen Brendekilde +Hans Baldung +Hans Baluschek +Hans Bellmer +Hans Christian Andersen +Hans Haacke +Hans Hartung +Hans Hofmann +Hans Holbein the Elder +Hans Holbein the Younger +Hans Makart +Hans Memling +Hans Thoma +Hans Zatzka +Harald Sohlberg +Harold Cazneaux +Harold Edgerton +Harold Gilman +Harold Harvey +Haroon Mirza +Harriet Backer +Harriet Lee-Merrion +Harry Bertoia +Harry Callahan +Harry Clarke +Harry Watrous +Harumi Hironaka +Harvey Kurtzman +Harvey Stein +Hassan Hajjaj +Hasui Kawase +Hayao Miyazaki +Hayv Kahraman +He Jiaying +Heather Theurer +Hector Guimard +Hedi Slimane +Hein Gorny +Heiner Luepke +Heinrich Kley +Heinrich Lefler +Helen Allingham +Helen Frankenthaler +Helen Levitt +Helene Knoop +Helene Schjerfbeck +Helio Oiticica +Helmut Newton +Hendrick Avercamp +Hendrick Cornelisz Vroom +Hendrick Goltzius +Hendrick ter Brugghen +Hendrik Kerstens +Henri Cartier-Bresson +Henri de Toulouse-Lautrec +Henri Fantin-Latour +Henri Le Fauconnier +Henri Le Sidaner +Henri Lebasque +Henri Manguin +Henri Matisse +Henri Rousseau +Henri-Edmond Cross +Henrietta Harris +Henriette Grindat +Henriëtte Ronner-Knip +Henry Asencio +Henry Darger +Henry Fuseli +Henry Justice Ford +Henry Moore +Henry Moret +Henry Ossawa Tanner +Henry Raeburn +Henry Scott Tuke +Herb Lubalin +Herb Ritts +Herb Trimpe +Herbert Bayer +Herbert List +Herman Brood +Hervé Guibert +Heywood Hardy +Hideyuki Kikuchi +Hieronymus Bosch +Hikari Shimoda +Hilma af Klint +Hippolyte Flandrin +Hirohiko Araki +Hiroshi Nagai +Hiroshi Sugimoto +Hiroshi Yoshida +Hiroyuki-Mitsume Takahashi +Honoré Daumier +Hope Gangloff +Horace Pippin +Horace Vernet +Horst P. Horst +Howard Arkley +Howard Chaykin +Howard Finster +Howard Hodgkin +Howard Pyle +Howard Schatz +Howard Terpning +Howardena Pindell +Hsiao-Ron Cheng +Hubert Robert +Hugh Ferriss +Hugh Kretschmer +Hugues Merle +Hyacinthe Rigaud +Iain Faulkner +Ian Davenport +Ian Howorth +Ian McQue +Ian Miller +Ida Rentoul Outhwaite +Igor Morski +Igor Zenin +Ikenaga Yasunari +Ildiko Neer +Ilse Bing +Ilya Kuvshinov +Ilya Mashkov +Ilya Repin +Inessa Garmash +Ingrid Baars +Ingrid Endel +Inio Asano +Inna Mosina +Irene Sheri +Iris van Herpen +Irma Stern +Irving Penn +Iryna Yermolova +Isaac Cordal +Isaac Julien +Isaac Levitan +Isaac Maimon +Isaiah Zagar +Isamu Noguchi +Ismail Inceoglu +Ito Shinsui +Ivan Aivazovsky +Ivan Albright +Ivan Bilibin +Ivan Fedorovich Choultse +Ivan Marchuk +Ivan Shishkin +Iwona Lifsches +J. J. Grandville +J. Scott Campbell +J.C. Leyendecker +J.M.W. Turner +Jacek Malczewski +Jacek Yerka +Jack Butler Yeats +Jack Davis +Jack Gaughan +Jack Hughes +Jack Kirby +Jack Ohman +Jack Spencer +Jack Vettriano +Jack Whitten +Jackson Pollock +Jacob Hashimoto +Jacob Jordaens +Jacob Lawrence +Jacob van Ruisdael +Jacopo Bassano +Jacques Henri Lartigue +Jacques Tardi +Jacques Villon +Jacques-Firmin Beauvarlet +Jacques-Laurent Agasse +Jacques-Louis David +Jake Wood-Evans +Jakub Różalski +Jakub Schikaneder +James Abbott McNeill Whistler +James Bullough +James C. 
Christensen +James Ensor +James Gilleard +James Gillray +James Gurney +James Jean +James Lee Byars +James McIntosh Patrick +James Nares +James Paick +James Pradier +James Rosenquist +James Stokoe +James Thomas Watts +James Tissot +James Turrell +James Wilson Morrice +Jamie Baldridge +Jamie Hawkesworth +Jamie Heiden +Jamie Hewlett +Jamie McKelvie +Jan Brueghel the Elder +Jan Davidsz de Heem +Jan Ditlev +Jan Frans van Bloemen +Jan Mankes +Jan Matejko +Jan Pietersz Saenredam +Jan Saudek +Jan Steen +Jan Toorop +Jan Urschel +Jan van Eyck +Jan van Goyen +Jan van Kessel the Elder +Jan van Scorel +Jane Crowther +Jane Graverol +Jane Newland +Janek Sedlar +Janet Delaney +Janet Echelman +Janice Sung +Janine Antoni +Janne Kahila +Januz Miralles +Jarek Kubicki +Jasmine Becket-Griffith +Jason A. Engle +Jason deCaires Taylor +Jason Edmiston +Jason Middlebrook +Jason Pearson +Jasper Francis Cropsey +Jasper Johns +Jaume Plensa +Jay Anacleto +Jay DeFeo +Jean Arp +Jean Auguste Dominique Ingres +Jean Cocteau +Jean Delville +Jean Dubuffet +Jean Fouquet +Jean Giraud +Jean Jullien +Jean Metzinger +Jean Nouvel +Jean Restout the Younger +Jean-Antoine Watteau +Jean-Baptiste Carpeaux +Jean-Baptiste Monge +Jean-François Millet +Jean-Gabriel Domergue +Jean-Honoré Fragonard +Jean-Joseph Benjamin-Constant +Jean-Léon Gérôme +Jean-Louis Forain +Jean-Michel Basquiat +Jean-Paul Riopelle +Jean-Sebastien Rossbach +Jeanloup Sieff +Jeannette Guichard-Bunel +JeeYoung Lee +Jeff Danziger +Jeff Easley +Jeff Kinney +Jeff Koons +Jeff Legg +Jeff Lemire +Jeff Rowland +Jeff Simpson +Jeff Smith +Jeff Soto +Jeff Wall +Jeffrey Catherine Jones +Jeffrey Smart +Jeffrey Smith +Jeffrey T. Larson +Jennifer Rubell +Jenny Saville +Jeppe Hein +Jeremiah Ketner +Jeremy Geddes +Jeremy Lipking +Jeremy Mann +Jerry Pinkney +Jerry Schatzberg +Jerry Siegel +Jesper Ejsing +Jessica Drossin +Jessica Rossier +Jessie Willcox Smith +Jhonen Vasquez +Jillian Tamaki +Jim Burns +Jim Davis +Jim Dine +Jim Holland +Jim Lee +Jim Lively +Jim Mahfood +Jim Woodring +Jimmy Lawlor +Jindrich Styrsky +Jo Ann Callis +Joachim Beuckelaer +Joachim Brohm +Joachim Patinir +Joachim Wtewael +Joan Eardley +Joan Miró +Joan Mitchell +Joana Vasconcelos +Joao Ruas +Joaquín Sorolla +Jocelyn Hobbie +Jody Bergsma +Joe Fenton +Joe Jusko +Joe Kubert +Joe Madureira +Joe Quesada +Joe Shuster +Joel Meyerowitz +Joel Rea +Joel Robison +Joel Sternfeld +Johan Christian Dahl +Johan Messely +Johannes Itten +Johannes Jan Schoonhoven +Johannes Vermeer +Johannes Voss +Johfra Bosschart +John Anster Fitzgerald +John Atkinson Grimshaw +John Batho +John Bauer +John Berkey +John Blanche +John Brack +John Bratby +John Buscema +John Butler Yeats +John Cassaday +John Chamberlain +John Closterman +John Collier +John Constable +John Crome +John Currin +John Duncan +John Everett Millais +John Frederick Kensett +John French Sloan +John Harris +John Hejduk +John Henry Twachtman +John Holcroft +John Howe +John Hoyland +John James Audubon +John Kenn Mortensen +John La Farge +John Larriva +John Lavery +John Lowrie Morrison +John Lurie +John Martin +John Mckinstry +John Melhuish Strudwick +John Pawson +John Perceval +John Philip Falter +John Piper +John Pitre +John Reuss +John Roddam Spencer Stanhope +John Ruskin +John Salminen +John Sell Cotman +John Singer Sargent +John Sloane +John T. 
Biggers +John Tenniel +John Trumbull +John Watkiss +John Wayne Gacy +John White Alexander +John Wilhelm +John William Godward +John William Waterhouse +Johnson Tsang +Jon Burgerman +Jon Foster +Jon J Muth +Jon Klassen +Jon McNaught +Jonas Burgert +Jonas Lie +Jonathan Lasker +Jonathan Meese +Jonathan Wolstenholme +Joram Roukes +Josan Gonzalez +José Clemente Orozco +Joseba Elorza +Josef Albers +Josef Capek +Josef Kote +Joseph Beuys +Joseph Clement Coll +Joseph Cornell +Joseph Ducreux +Joseph Farquharson +Joseph Lorusso +Joseph Stella +Josephine Wall +Josh Adamski +Josh Keyes +Joshua Reynolds +Joyce Kozloff +József Rippl-Rónai +Juan Gris +Judith Leyster +Judy Chicago +Juergen Teller +Jules Bastien-Lepage +Jules Breton +Jules Cheret +Jules Pascin +Jules Tavernier +Julian Opie +Julian Schnabel +Juliana Nan +Julie Bell +Julie Blackmon +Julie Mehretu +Julio Larraz +Julio Le Parc +Jun Kaneko +Junji Ito +Junko Mizuno +Jusepe de Ribera +Justin Gaffrey +Justin Gerard +Justin Roiland +Kadir Nelson +Kaethe Butcher +Kaja Foglio +Kara Walker +Karel Thole +Karen Knorr +Karen Wallis +Karl Blossfeldt +Karl Friedrich Schinkel +Karl Gerstner +Karl Knaths +Karl Schmidt-Rottluff +Karol Bak +Kate Beaton +Kate Greenaway +Katerina Belkina +Käthe Kollwitz +Kathrin Longhurst +Kathryn Morris Trotter +Kati Horna +Katia Chausheva +Katsuhiro Otomo +Katsushika Hokusai +Kawanabe Kyōsai +Kay Nielsen +Kay Sage +Kazimir Malevich +Kazuki Takamatsu +Kazuo Koike +Kazuo Shiraga +Kees Scherer +Kees van Dongen +Kehinde Wiley +Keith Haring +Keith Mallett +Keith Negley +Keith Parkinson +Kelly Freas +Kelly McKernan +Kelly Sue Deconnick +Kelly Vivanco +Ken Kelly +Ken Sugimori +Kengo Kuma +Kenne Gregoire +Kenneth Noland +Kenneth Rocafort +Kenny Scharf +Kenro Izu +Kent Monkman +Kentaro Miura +Kerby Rosanes +Kerry James Marshall +Kestutis Kasparavicius +Kevin McNeal +Kevin Sloan +Kieron Gillen +Kiki Smith +Kilian Eng +Kim Jung Gi +Kim Keever +Kirsty Mitchell +Kishin Shinoyama +Kitagawa Utamaro +Kitty Lange Kielland +Klaus Janson +Klaus Pillon +Klaus Wittmann +Koloman Moser +Konstantin Korovin +Konstantin Somov +Konstantin Yuon +Koson Ohara +Krenz Cushart +Kris Knight +Kuno Veeber +Kurt Hutton +Kurt Schwitters +Kurt Wenner +Kuzma Petrov-Vodkin +Kyffin Williams +Kylli Sparre +L. Birge Harrison +L. S. 
Lowry +Larry Carlson +Larry Elmore +Larry Fink +Larry Poons +Larry Sultan +László Moholy-Nagy +Laura Makabresku +Laure Albin Guillot +Laurel Burch +Lauren Faust +Laurent Baheux +Laurent Grasso +Laurie Greasley +Laurie Lipton +Lavinia Fontana +Lawren Harris +Lawrence Alma-Tadema +Lawrence Weiner +Leandro Erlich +Leanne Surfleet +Lee Bogle +Lee Bontecou +Lee Jeffries +Lee Krasner +Lee Madgwick +Leiji Matsumoto +Leo Putz +Léon Bakst +Leon Kossoff +Leon Spilliaert +Leonardo da Vinci +Leonetto Cappiello +Leonid Afremov +Leonor Fini +Leonora Carrington +LeRoy Neiman +Les Edwards +Lesser Ury +Leszek Bujnowski +Leticia Gillett +Lev Lagorio +Lewis Morley +Li Wei +Liam Gillick +Liam Sharp +Liam Wong +Lilia Alvarado +Lilla Cabot Perry +Lillian Bassman +Linnea Strid +Lisa Frank +Lisa Holloway +Lise Deharme +Lisette Model +Liu Ye +Lois Greenfield +Lois van Baarle +Lorenz Hideyoshi +Loretta Lux +Lori Earley +Lorser Feitelson +Loui Jover +Louis Anquetin +Louis Aston Knight +Louis Comfort Tiffany +Louis Faurer +Louis Icart +Louis Janmot +Louis Kahn +Louis Majorelle +Louis Marcoussis +Louis Rhead +Louis Stettner +Louis Valtat +Louis Wain +Louis Weldon Hawkins +Louise Bourgeois +Louise Dahl-Wolfe +Lovis Corinth +Lowell Herrero +Luca della Robbia +Luca Giordano +Lucas Cranach the Elder +Lucas Cranach the Younger +Lucian Freud +Lucien Clergue +Lucien Pissarro +Lucio Fontana +Lucy Glendinning +Lucy Grossmith +Lucy Madox Brown +Ludwig Mies van der Rohe +Luigi Loir +Luis Ricardo Falero +Luis Royo +Luke Fildes +Lygia Clark +Lynd Ward +Lynda Barry +Lynda Benglis +Lynette Yiadom-Boakye +Lyonel Feininger +Lyubov Popova +M.C. Escher +M.F. Husain +M.W. Kaluta +Mab Graves +Magali Villeneuve +Maggi Hambling +Magnus Enckell +Maia Flore +Makoto Shinkai +Malcolm Howie +Malcolm Liepke +Malcolm Teasdale +Malick Sidibé +Mamoru Oshii +Man Ray +Mandy Disher +Mandy Jurgens +Mao Hamaguchi +Marat Safin +Marc Chagall +Marc Davis +Marc Lagrange +Marc Quinn +Marc Silvestri +Marc Simonetti +Marcel Breuer +Marcel Duchamp +Marcel Mouly +Marcin Sobas +Marco Mazzoni +Marek Okon +Margaret Keane +Margaret Macdonald Mackintosh +Margaret Modlin +Margaret Olley +Margaret Preston +Maria Kreyn +Maria Lassnig +Maria Sibylla Merian +Marianna Rothen +Marianne Breslauer +Marianne North +Marianne Stokes +Marianne von Werefkin +Marie Laurencin +Marie Severin +Marie Spartali Stillman +Marilyn Minter +Marina Abramović +Mario Sorrenti +Mario Testino +Marius Borgeaud +Marjane Satrapi +Marjorie Miller +Mark Arian +Mark Briscoe +Mark Brooks +Mark Catesby +Mark Demsteader +Mark Gertler +Mark Henson +Mark Keathley +Mark Kostabi +Mark Lague +Mark Lovett +Mark Rothko +Mark Ryden +Mark Seliger +Mark Steinmetz +Mark Tobey +Marko Manev +Marlene Dumas +Marsden Hartley +Marta Bevacqua +Martin Ansin +Martin Creed +Martin Deschambault +Martin Grelle +Martin Johnson Heade +Martin Kippenberger +Martin Parr +Martin Puryear +Martin Rak +Martin Schongauer +Martin Stranka +Martin Whatson +Martin Wittfooth +Martina Hoffman +Martine Johanna +Martiros Saryan +Mary Anning +Mary Beale +Mary Blair +Mary Bradish Titcomb +Mary Cassatt +Mary Ellen Mark +Mary Fedden +Mary Heilmann +Mary Jane Ansell +Masaaki Sasamoto +Masamune Shirow +Mat Collishaw +Mati Klarwein +Matt Bors +Matt Fraction +Matt Groening +Matthew Barney +Matthias Grünewald +Matthias Jung +Matti Suuronen +Mattias Adolfsson +Maurice de Vlaminck +Maurice Denis +Maurice Prendergast +Maurice Sapiro +Maurice Sendak +Maurice Utrillo +Maurizio Cattelan +Max Beckmann +Max Dupain +Max Ernst +Max Fleischer +Max
Pechstein +Max Rive +Max Weber +Maxfield Parrish +Maxime Maufra +Maximilian Pirner +Maximilien Luce +Maya Lin +Maynard Dixon +Medardo Rosso +Meghan Howland +Mehmed Siyah-Kalem +Melissa Launay +Melvin Sokolsky +Meredith Marsone +Méret Oppenheim +Meryl McMaster +Michael Ancher +Michael Borremans +Michael Carson +Michael Cheval +Michael Cho +Michael Craig-Martin +Michael Creese +Michael Deforge +Michael Eastman +Michael Garmash +Michael Heizer +Michael Hussar +Michael Hutter +Michael Kenna +Michael Malm +Michael Page +Michael Parkes +Michael Shainblum +Michael Shapcott +Michael Sowa +Michael Sweerts +Michael Vincent Manalo +Michael Wesely +Michael Whelan +Michal Karcz +Michal Lisowski +Michelangelo Pistoletto +Mickalene Thomas +Miho Hirano +Mikalojus Konstantinas Ciurlionis +Mike Allred +Mike Campau +Mike Dargas +Mike Deodato +Mike Judge +Mike Kelley +Mike Mayhew +Mike Mignola +Mike Ploog +Mike Winkelmann +Mike Worrall +Mikhail Larionov +Mikhail Nesterov +Mikhail Vrubel +Miki Asai +Mikko Lagerstedt +Miles Aldridge +Milo Manara +Milton Avery +Milton Glaser +Mimmo Rotella +Minjae Lee +Miriam Schapiro +Miroslav Tichý +Misha Gordin +Miss Aniela +Moise Kisling +Mona Hatoum +Monia Merlo +Mordecai Ardon +Morris Hirshfield +Morris Louis +Mort Künstler +Moses Soyer +Moshe Safdie +Myles Birket Foster +Myoung Ho Lee +N.C. Wyeth +Affandi +Archillect +Artgerm +Balthus +Banksy +Beeple +Bordalo II +Brassaï +Bronzino +Byam Shaw +Canaletto +Caravaggio +Craola +Domenichino +Duccio +El Greco +Giorgione +Giotto +Guercino +Hergé +Hiroshige +HUSH +Kunisada +Kurzgesagt +Masaccio +Michelangelo +Moebius +OSGEMEOS +Parmigianino +Pinturicchio +Pontormo +Raphael +RHADS +ROA +Rone +Sardax +Sparth +teamLab +theCHAMBA +Tintoretto +Titian +Tom of Finland +Yuumei +Nadav Kander +Nam June Paik +Nan Goldin +Naoki Urasawa +Naoko Takeuchi +Naomi Okubo +Naomi Tydeman +Naoto Hattori +Natalia Drepina +Natalia Goncharova +Natalia Rak +Natalie Shau +Nathan Coley +Nathan Spoor +Nathan Wirth +ND Stevenson +Neal Adams +Neil Gaiman +Neil Welliver +Nele Zirnite +Nell Dorr +Nelleke Pieters +Nicholas Hely Hutchinson +Nicholas Hilliard +Nicholas Hughes +Nicholas Roerich +Nick Alm +Nick Knight +Nick Veasey +Nick Walker +Nickolas Muray +Nicola Samori +Nicolaes Maes +Nicolaes van Verendael +Nicolas Bruno +Nicolas de Stael +Nicolas Delort +Nicolas Mignard +Nicolas Poussin +Nicole Eisenman +Nicoletta Ceccoli +Nigel van Wieck +Nike Savvas +Nikolai Ge +Nikolai Lockertsen +Nikolay Makovsky +Nikolina Petolas +Nina Leen +Njideka Akunyili Crosby +Noah Bradley +Nobuyoshi Araki +Noell Oszvald +Nora Heysen +Noriyoshi Ohrai +Norman Ackroyd +Norman Bluhm +Norman Cornish +Norman Foster +Norman Lindsay +Norman Parkinson +Norman Rockwell +Octavio Ocampo +Odd Nerdrum +Odilon Redon +Ohara Koson +Okuda San Miguel +Olafur Eliasson +Oleg Oprisco +Oleg Shuplyak +Olive Cotton +Oliver Jeffers +Oliver Wetter +Olivia Locher +Olivier Bonhomme +Olivier Valsecchi +Orazio Gentileschi +Osamu Tezuka +Oscar Niemeyer +Oskar Fischinger +Oskar Kokoschka +Oskar Schlemmer +Ossip Zadkine +Oswaldo Guayasamin +Otto Dix +Otto Marseus van Schrieck +Otto Piene +Pablo Picasso +Pam Hawkes +Pamela Colman Smith +Paolo Roversi +Paolo Soleri +Paolo Uccello +Paolo Veronese +Pascale Campion +Pat Steir +Patrice Murciano +Patricia Piccinini +Patricia Polacco +Patrick Brown +Patrick Caulfield +Patrick Demarchelier +Patrick
Dougherty +Patrick Heron +Patrick McHale +Patrick Nagel +Patrick Woodroffe +Patty Maher +Paul Barson +Paul Cadmus +Paul Catherall +Paul Cézanne +Paul Chabas +Paul Corfield +Paul Delvaux +Paul Fusco +Paul Gauguin +Paul Gustav Fischer +Paul Hedley +Paul Henry +Paul Klee +Paul Laffoley +Paul Lehr +Paul Lovering +Paul Nash +Paul Pelletier +Paul Poiret +Paul Rader +Paul Rand +Paul Ranson +Paul Rudolph +Paul Sérusier +Paul Signac +Paul Strand +Paul Wonner +Paula Modersohn-Becker +Paula Rego +Paula Scher +Paulus Potter +Pawel Kuczynski +Peder Balke +Peder Mork Monsted +Pegi Nicol MacLeod +Pendleton Ward +Percy Tarrant +Peregrine Heathcote +Perry Rhodan +Pete Turner +Peter Andrew Jones +Peter Bagge +Peter Basch +Peter Blake +Peter Coulson +Peter Doig +Peter Eisenman +Peter Elson +Peter Gric +Peter Holme III +Peter Howson +Peter Kemp +Peter Lely +Peter Lindbergh +Peter Lippmann +Peter Max +Peter Milligan +Peter Mitchev +Peter Mohrbacher +Peter Paul Rubens +Peter Saville +Peter Sculthorpe +Peter Sedgley +Peter Wileman +Peter Zumthor +Petra Cortright +Petrina Hicks +Phil Foglio +Phil Jimenez +Phil Koch +Phil Noto +Philip Guston +Philip McKay +Philip Pearlstein +Philip Taaffe +Philip Treacy +Philip Wilson Steer +Philip-Lorca diCorcia +Philipp Otto Runge +Philippe Buchet +Philippe de Champaigne +Philippe Druillet +Philippe Halsman +Philippe Parreno +Phoebe Anna Traquair +Phyllida Barlow +Piero della Francesca +Piero di Cosimo +Pierre Bonnard +Pierre Huyghe +Pierre Pellegrini +Pierre Puvis de Chavannes +Pierre Soulages +Pierre-Auguste Renoir +Piet Hein Eek +Piet Mondrian +Pieter Aertsen +Pieter Bruegel the Elder +Pieter Brueghel the Younger +Pieter Claesz +Pieter de Hooch +Pieter Hugo +Pieter Jansz Saenredam +Pieter Nason +Pieter-Jansz van Asch +Pietro Antonio Rotari +Pietro da Cortona +Pietro da Rimini +Pino Daeni +Pipilotti Rist +PJ Crook +Pol Ledent +Polixeni Papapetrou +Pompeo Batoni +Posuka Demizu +Prudence Heward +Q Hayashida +Qian Xuan +Quentin Blake +Quint Buchholz +R. Kenton Nelson +Rachel Maclean +Rachel Ruysch +Rachel Whiteread +Rafael Albuquerque +Rafael Zabaleta +Ragnar Kjartansson +Raimonds Staprans +Raimundo de Madrazo y Garreta +Raina Telgemeier +Ralph Bakshi +Ralph Blakelock +Ralph Horsley +Ralph McQuarrie +Ralph Steadman +Ramon Casas +Randolph Caldecott +Randolph Stanley Hewton +Range Murata +Raoul De Keyser +Raoul Dufy +Raphael Kirchner +Raphael Lacoste +Raphael Soyer +Raphaelle Peale +Rashad Alakbarov +Ravi Zupa +Ray Caesar +Ray Collins +Ray Donley +Ray Eames +Ray Metzker +Raymond Briggs +Raymond Duchamp-Villon +Raymond Leech +Raymond Pettibon +Raymond Swanland +Raynald Leclerc +Rebeca Saray +Rebecca Guay +Rebecca Louise Law +Rebecca Sugar +Regina Valluzzi +Reginald Marsh +Rembrandt van Rijn +Remedios Varo +Ren Hang +Renato Guttuso +Rene Burri +René Lalique +Rene Magritte +Reylia Slaby +Ricardo Bofill +Richard Anderson +Richard Bergh +Richard Billingham +Richard Burlet +Richard Corben +Richard Dadd +Richard Deacon +Richard Diebenkorn +Richard Doyle +Richard E. Miller +Richard Eurich +Richard Hamilton +Richard Lindner +Richard Long +Richard McGuire +Richard Meier +Richard Misrach +Richard Mosse +Richard Parkes Bonington +Richard Phillips +Richard Pousette-Dart +Richard S. 
Johnson +Richard Scarry +Richard Schmid +Richard Serra +Richard Tuttle +Rick Amor +Rick Guidice +Rick Owens +Rimel Neffati +Rinko Kawauchi +Rob Browning +Rob Gonsalves +Rob Guillory +Rob Hefferan +Rob Liefeld +Robby Cavanaugh +Robert Antoine Pinchon +Robert Bateman +Robert Bissell +Robert Campin +Robert Crumb +Robert Delaunay +Robert Gillmor +Robert Hagan +Robert Henri +Robert Indiana +Robert Irwin +Robert Mapplethorpe +Robert McCall +Robert McGinnis +Robert Motherwell +Robert Rauschenberg +Robert S. Duncanson +Robert Smithson +Robert Stivers +Robert Vonnoh +Robert Williams +Roberto Ferri +Roberto Matta +Rockwell Kent +Rodney Matthews +Rodney Smith +Roger de La Fresnaye +Roger Deakins +Roger Dean +Rogier van der Weyden +Roland Topor +Rolf Armstrong +Romaine Brooks +Roman Vishniac +Romare Bearden +Romero Britto +Romina Ressia +Ron Arad +Ron English +Ron Mueck +Ron Walotsky +Ronald Balfour +Ronald Searle +Ronald Wimberly +Roni Horn +Rosa Bonheur +Rosalba Carriera +Rose Wylie +Ross Tran +Roxy Paine +Roy DeCarava +Roy Lichtenstein +Roz Chast +Ruan Jia +Rudolf Ernst +Rudy Siswanto +Rufino Tamayo +Rui Palha +Rumiko Takahashi +Rupert Bunny +Rupert Vandervell +Rupi Kaur +Ruslan Lobanov +Russ Mills +Russell Dauterman +Ruth Bernhard +Ruth Orkin +Ryan Hewett +Ryan McGinley +Ryan Ottley +Ryan Stegman +Ryohei Fuke +Ryohei Hase +Ryoji Ikeda +Sabbas Apterus +Sacha Goldberger +Sailor Moon +Sally Mann +Sally Storch +Salomon van Ruysdael +Salvador Dalí +Salvator Rosa +Sam Bosma +Sam Francis +Sam Gilliam +Sam Guay +Sam Kieth +Sam Spratt +Sam Toft +Samantha Keely Smith +Samuel Melton Fisher +Samuel Peploe +Samuel van Hoogstraten +Sandra Chevrier +Sandra Dieckmann +Sandro Botticelli +Sandy Skoglund +Sanford Robinson Gifford +Santiago Calatrava +Santiago Caruso +Santiago Rusinol +Sarah Andersen +Sarah Lucas +Sarah Morris +Sarah Sze +Satoshi Kon +Saturno Butto +Saul Leiter +Saul Steinberg +Scarlett Hooft Graafland +Scott Kolins +Scott Listfield +Scott McCloud +Scott Naismith +Scott Rohlfs +Sean Scully +Sean Yoro +Sebastian Errazuriz +Sebastiano del Piombo +Serge Attukwei Clottey +Serge Marshennikov +Serge Najjar +Sergey Musin +Sergio Aragonés +Sergio Larraín +Sergio Toppi +Seth Globepainter +Shaun Tan +Sheila Hicks +Shepard Fairey +Sherree Valentine Daines +Sherry Akrami +Shigenori Soejima +Shilin Huang +Shinji Aramaki +Shintaro Kago +Shirin Neshat +Shohei Otomo +Shotaro Ishinomori +Shozo Shimamoto +Shuzo Oshimi +Sidney Nolan +Sidney Sime +Signe Vilstrup +Silvestro Lega +Simeon Solomon +Simon Birch +Simon Bisley +Simon Prades +Simon Stalenhag +Simon Vouet +Simone Bianchi +Sir James Guthrie +Siya Oum +Skottie Young +Slawomir Maniak +Slim Aarons +Sofonisba Anguissola +Sol LeWitt +Sonia Delaunay +Sopheap Pich +Sophie Anderson +Sou Fujimoto +Spencer Tunick +Squeak Carnwath +Stan Berenstain +Stanhope Forbes +Stanisław Szukalski +Stanisław Wyspiański +Stanley Donwood +Stanley Kubrick +Stanley Spencer +Stanley William Hayter +Stasia Burrington +Stefan Gesell +Stephan Martinière +Stephanie Rew +Stephen Darbishire +Stephen Gammell +Stephen Hillenburg +Stephen Mackey +Stephen Ormandy +Stephen Shore +Stephen Shortridge +Stephen Wiltshire +Stephen Youll +Steve Argyle +Steve Dillon +Steve Ditko +Steve Epting +Steve Hanks +Steve Henderson +Steve Hillier +Steve Lieber +Steve McCurry +Steve Sack +Steven Holl +Steven Klein +Steven Meisel +Steven Outram +Storm Thorgerson +Stuart Davis +Stuart Haygarth +Stuart Immonen +Subodh Gupta +Sudersan Pattnaik +Sui Ishida +Susan Seddon Boulet +Suzanne Valadon +Sven Nordqvist +Sverre Fehn 
+Syd Mead +Sydney Prior Hall +Tadao Ando +Taiyō Matsumoto +Takashi Murakami +Takato Yamamoto +Takeshi Obata +Talbot Hughes +Tamara de Lempicka +Tami Bone +Tanya Shatseva +Tara McPherson +Taras Loboda +Tarsila do Amaral +Tatiana Suarez +Tatsuo Miyajima +Tatsuro Kiuchi +Taylor Wessing +Ted Nasmith +Temmie Chang +Terada Katsuya +Terry Dodson +Terry O'Neill +Terry Oakes +Terry Redlin +Tetsuya Nomura +Teun Hocks +Tex Avery +Theo van Doesburg +Theo van Rysselberghe +Theodor Kittelsen +Théodore Chassériau +Théodore Géricault +Theodore Robinson +Théophile Steinlen +Thiago Valdi +Thom Mayne +Thomas Barbey +Thomas Benjamin Kennington +Thomas Birch +Thomas Blackshear +Thomas Cole +Thomas Dewing +Thomas Dodd +Thomas Eakins +Thomas Edwin Mostyn +Thomas Gainsborough +Thomas Hart Benton +Thomas Heatherwick +Thomas Kinkade +Thomas Lawrence +Thomas Leuthard +Thomas Moran +Thomas Nast +Thomas Saliot +Thomas Sully +Thomas W Schaller +Thomas Wrede +Thurston Hopkins +Tiago Hoisel +Tibor Nagy +Tiffany Bozic +Tim Burton +Tim Doyle +Tim Etchells +Tim Hildebrandt +Tim Holtz +Tim Okamura +Tim Shumate +Tim Walker +Tim White +Timothy Easton +Titus Kaphar +TJ Drysdale +Toby Fox +Todd Hido +Todd McFarlane +Todd Nauck +Todd Schorr +Tokujin Yoshioka +Tom Bagshaw +Tom Chambers +Tom Everhart +Tom Fruin +Tom Grummett +Tom Hammick +Tom Killion +Tom Roberts +Tom Thomson +Tom Wesselmann +Tom Whalen +Tomás Saraceno +Tomasz Alen Kopera +Tomer Hanuka +Tomma Abts +Tomokazu Matsuyama +Ton Dubbeldam +Toni Frissell +Tony Conrad +Tony Cragg +Tony DiTerlizzi +Tony Fitzpatrick +Tony Moore +Tony Orrico +Tony Oursler +Tony Sart +Tooth Wu +Toshiko Okanoue +Toshio Saeki +Tove Jansson +Toyo Ito +Tracey Emin +Tracie Grimwood +Tran Nguyen +Travis Louie +Trevor Brown +Trish Mistric +Tristan Eaton +Troy Brooks +Truls Espedal +Tsutomu Nihei +Tyler Edlin +Tyler Rayburn +Tyler Shields +Ub Iwerks +Uemura Shoen +Umberto Boccioni +Ursula von Rydingsvard +Utagawa Kuniyoshi +Valentin de Boulogne +Valentin Rekunenko +Valentin Serov +Valerie Hegarty +Valerio Olgiati +Vanessa Beecroft +Vanessa Bell +Vania Zouravliov +Vasily Vereshchagin +Verner Panton +Veronika Pinke +Vicente Romero Redondo +Victo Ngai +Victor Brauner +Victor Enrich +Victor Horta +Victor Moscoso +Victor Nizovtsev +Victor Pasmore +Victor Vasarely +Victoria Crowe +Victoria Selbach +Vija Celmins +Vik Muniz +Viktor Vasnetsov +Vilhelm Hammershoi +Vincent Callebaut +Vincent Desiderio +Vincent Di Fate +Vincent van Gogh +Virgil Finlay +Virginia Frances Sterrett +Vito Acconci +Vittorio Matteo Corcos +Vittorio Reggianini +Vivian Maier +Viviane Sassen +Vivienne Westwood +Vladimir Kush +Vladimir Tatlin +Vladimir Volegov +Vytautas Kairiukstis +W. Eugene Smith +W. Heath Robinson +Wade Guyton +Wadim Kashin +Walker Evans +Walt Disney +Walt Kelly +Walter Crane +Walter Ernest Webster +Walter Gropius +Walter Langley +Walter Launt Palmer +Walter Percy Day +Walter Sickert +Wangechi Mutu +Warren Ellis +Warwick Goble +Wassily Kandinsky +Wayne Barlowe +Wayne Thiebaud +Wendy Froud +Wes Anderson +Wifredo Lam +Wilhelmina Barns-Graham +Will Barnet +Will Eisner +Willard Metcalf +Willem Basse +Willem Claesz. 
Heda +Willem de Kooning +Willem Haenraets +Willem Kalf +Willem van Aelst +Willem van Haecht +Willi Baumeister +William Blake +William Dyce +William Eggleston +William Etty +William Gropper +William Henry Hunt +William Henry Margetson +William Hogarth +William Holman Hunt +William James Glackens +William Kay Blacklock +William Kentridge +William Klein +William Langson Lathrop +William Larkin +William Morris +William Nicholson +William Oxer +William Powell Frith +William Russell Flint +William S. Burroughs +William Stanley Haseltine +William Steig +William Stout +William Timlin +William Trost Richards +William Turner +William Wegman +William Wendt +William Whitaker +William Wray +William Zorach +William-Adolphe Bouguereau +Wim Delvoye +Wim Wenders +Winifred Knights +Winslow Homer +Winsor McCay +Wlad Safronow +Wolfgang Suschitzky +Wong Kar-wai +Worthington Whittredge +Wu Guanzhong +Xiaofei Yue +Xu Bing +Yaacov Agam +Yanjun Cheng +Yasuo Kuniyoshi +Yasushi Nirasawa +Yayoi Kusama +Yiannis Moralis +Yigal Ozeri +Yinka Shonibare +Yoann Lossel +Yoh Nagao +Yoji Shinkawa +Yoshitaka Amano +Yoshiyuki Tomino +Yosuke Ueno +Yuko Shimizu +Yuri Shwedoff +Yusuke Murata +Yves Klein +Yves Tanguy +Yvonne Coomber +Zack Snyder +Zaha Hadid +Zdzisław Beksiński +Zena Holloway +Zhang Jingna +Zhang Kechun +Zhichao Cai +Zinaida Serebriakova +Zoe Buckman diff --git a/wildcards/color.txt b/wildcards/color.txt new file mode 100644 index 0000000000000000000000000000000000000000..467898638cec80b44589f0d70e1bd555d2b28ac1 --- /dev/null +++ b/wildcards/color.txt @@ -0,0 +1,17 @@ +aqua +black +blue +fuchsia +gray +green +lime +maroon +navy +olive +orange +purple +red +silver +teal +white +yellow diff --git a/wildcards/color_flower.txt b/wildcards/color_flower.txt new file mode 100644 index 0000000000000000000000000000000000000000..bce7a3c6c9015e635794cde4f85b95bb99344044 --- /dev/null +++ b/wildcards/color_flower.txt @@ -0,0 +1 @@ +__color__ __flower__ diff --git a/wildcards/extended-color.txt b/wildcards/extended-color.txt new file mode 100644 index 0000000000000000000000000000000000000000..04cf7af72a6a85a01f523ccc8a37a4a9c399e98c --- /dev/null +++ b/wildcards/extended-color.txt @@ -0,0 +1,147 @@ +aliceblue +antiquewhite +aqua +aquamarine +azure +beige +bisque +black +blanchedalmond +blue +blueviolet +brown +burlywood +cadetblue +chartreuse +chocolate +coral +cornflowerblue +cornsilk +crimson +cyan +darkblue +darkcyan +darkgoldenrod +darkgray +darkgreen +darkgrey +darkkhaki +darkmagenta +darkolivegreen +darkorange +darkorchid +darkred +darksalmon +darkseagreen +darkslateblue +darkslategray +darkslategrey +darkturquoise +darkviolet +deeppink +deepskyblue +dimgray +dimgrey +dodgerblue +firebrick +floralwhite +forestgreen +fuchsia +gainsboro +ghostwhite +gold +goldenrod +gray +green +greenyellow +grey +honeydew +hotpink +indianred +indigo +ivory +khaki +lavender +lavenderblush +lawngreen +lemonchiffon +lightblue +lightcoral +lightcyan +lightgoldenrodyellow +lightgray +lightgreen +lightgrey +lightpink +lightsalmon +lightseagreen +lightskyblue +lightslategray +lightslategrey +lightsteelblue +lightyellow +lime +limegreen +linen +magenta +maroon +mediumaquamarine +mediumblue +mediumorchid +mediumpurple +mediumseagreen +mediumslateblue +mediumspringgreen +mediumturquoise +mediumvioletred +midnightblue +mintcream +mistyrose +moccasin +navajowhite +navy +oldlace +olive +olivedrab +orange +orangered +orchid +palegoldenrod +palegreen +paleturquoise +palevioletred +papayawhip +peachpuff +peru +pink +plum +powderblue +purple +red +rosybrown 
+royalblue +saddlebrown +salmon +sandybrown +seagreen +seashell +sienna +silver +skyblue +slateblue +slategray +slategrey +snow +springgreen +steelblue +tan +teal +thistle +tomato +turquoise +violet +wheat +white +whitesmoke +yellow +yellowgreen diff --git a/wildcards/flower.txt b/wildcards/flower.txt new file mode 100644 index 0000000000000000000000000000000000000000..bb3e984b6f2a9e3d661e084627001f3d42209d5c --- /dev/null +++ b/wildcards/flower.txt @@ -0,0 +1,250 @@ +Acacia +Achillea +Adam's-needle +African Boxwood +African Lily +Agapanthus +Ageratum +Ageratum houstonianum +Allium +Alpina +Alstroemeria +Amaranthus hypochondriacus +Amaryllis +Ammi majus +Aconitum +Anemone +Anigozanthus +Annual Delphinium +Anthurium +Antirrhinum majus +Artichoke thistle +Asparagus +Aster +Astilbe +Baby's Breath +Bachelor's Button +Banksia +Bellflower +Big Flax +Bighead Knapweed +Billy Buttons +Bird of Paradise +Blazing Star +Blue Lace Flower +Boronia +Bouvardia +Boxwood African +Diosma +Buckthorn Variegated +Buddleia +Bupleurum +Butterfly Bush +Butterfly Orchid +Calla Lily +Campanula +Candytuft +Canterbury Bells +Carnation +Carthamus +Casa Blanca +Caspia +Cattleya +Celosia +Celosia argentea +Centaurea cyanus +Chamelaucium +Chimney Bells +Chrysanthemum +Chrysanthemum x morifolium +Clarkia +Cockscomb Crested +Coffee Bean Berry +Common Myrtle +Common Yarrow +Cone Flower +Consolida ambigua +Convallaria +Cordyline +Cosmos +Cornflower +Craspedia +Curly Willow +Cymbidium +Cymbidium Orchid +Daffodil +Dahlia +Daisy Mums +Delphinium Belladonna +Delphinium Pacific Giant +Dendrobium +Dendrobium Orchid +Dianthus barbatus +Dianthus caryophyllus +Dianthus caryophyllus nana +Erica spp +Eucalyptus seeded +Eucalyptus silver dollar +Eustoma grandiflorum +False Bird of Paradise +False Spirea +Farewell-To-Spring +Fernleaf Yarrow +Feverfew +Flamingo Flower +Flax New Zealand +Floss Flower +Foxtail Fern +Freesia +Freesia x hybrida +Fuji Mums +Gardenia +Gay Feather +Genista +Gerbera +Gerbera Ruby Red +Ginger +Gladiolus +Gladiolus hybrid nanus +Goat's Beard +Godetia +Golden Rod +Guernsey Lily +Gyp +Gypsophila paniculata +Hanging Heliconia +Heath +Heather +Helianthus annuus +Heliconia spp.
+Hippeastrum +Hydrangea +Iberis amara +Inca Lily +Iris +Japhette Orchid +Jonquil +Knapweed +Lace fern +Larkspur +Lathyrus odoratus +Lavandula +Lavender +Liatris +Lilac +Lily +Lily-of-the-Valley +Lily Casa Blanca +Lily of the Field +Lily of the Nile +Lily Stargazer +Limonium +Lisianthus +Marguerite daisy +Matthiola incana +Melaleuca +Mimosa +Misty Blue Limonium +Moluccella laevis +Monkshood +Montbretia +Monte Cassino +Moon orchid +Musa +Myrsine +Myrtle +Myrtus +Nephrolepis +Nerine +Nerine Lily +Nigella +Ornithogalum +Paeonia +Painted Tongue +Paper Reed +Papyrus lion's head +Peony +Peruvian Lily +Phalaenopsis +Philodendron +Phlox +Pincushion Flower +Pink Mink +Pitt +Pittosporum +Pixie Carnation +Polianthes tuberosa +Pompon Chrysanthemum +Poppy Anemone +Porium +Pussy Willow +Queen Anne's Lace +Ranunculus +Red Ribbons +Rice flower +Rose +Rose Bridal Pink +Rose Bridal White +Rose Champagne +Rose Diadem +Rose Emblem +Rose Kardinal +Rose Lady Liberty +Rose Lavanda +Rose Osiana +Rose Royalty +Safari Sunset +Safflower +Sage Perennial +Salix +Salmon Reagan +Sansevieria +Saponaria +Satin Flowers +Saxicola +Scabiosa +Schinus +Sea lavender +Shell Flowers +Snake Plant +Snapdragon +Solidago +Solidaster +Speedwell +Spider Lily +Spider Mums +Spray Carnation +Sprengeri Fern +Star of Bethlehem +Statice +Stenamezon +Stephanotis +Strawberry banksia +Strawflower +Summer poinsettia +Summer's Darling +Sunflower +Sweet Pea +Sweet William +Sword Fern +Syringa vulgaris +Tailflowers +Tassel flower +Thoroughwax +Throatwort +Trachelium +Tree Fern +Trumpet Lily +Tuberose +Tulip +Tulipa +Veronica +Wattle +Waxflower +Wild Plantain +Willow curly +Windflower +Wolfsbane +Zantedeschia +Zinnia +Zinnia elegans diff --git a/wildcards/nationality.txt b/wildcards/nationality.txt new file mode 100644 index 0000000000000000000000000000000000000000..a8ae3a57732b15137752df54fdb930f92b6024b6 --- /dev/null +++ b/wildcards/nationality.txt @@ -0,0 +1,195 @@ +Afghan +Albanian +Algerian +American +Andorran +Angolan +Antiguan +Argentine +Armenian +Australian +Austrian +Azerbaijani +Bahamian +Bahraini +Bangladeshi +Barbadian +Barbudan +Batswana +Belarusian +Belgian +Belizean +Beninese +Bhutanese +Bolivian +Bosnian +Brazilian +British +Bruneian +Bulgarian +Burkinabe +Burmese +Burundian +Cambodian +Cameroonian +Canadian +Cape Verdean +Central African +Chadian +Chilean +Chinese +Colombian +Comoran +Congolese +Costa Rican +Croatian +Cuban +Cypriot +Czech +Danish +Djiboutian +Dominican +Dutch +East Timorese +Ecuadorean +Egyptian +Emirati +Equatorial Guinean +Eritrean +Estonian +Ethiopian +Fijian +Filipino +Finnish +French +Gabonese +Gambian +Georgian +German +Ghanaian +Greek +Grenadian +Guatemalan +Guinea-Bissauan +Guinean +Guyanese +Haitian +Herzegovinian +Honduran +Hungarian +Icelander +Indian +Indonesian +Iranian +Iraqi +Irish +Israeli +Italian +Ivorian +Jamaican +Japanese +Jordanian +Kazakhstani +Kenyan +Kittian and Nevisian +Kuwaiti +Kyrgyz +Laotian +Latvian +Lebanese +Liberian +Libyan +Liechtensteiner +Lithuanian +Luxembourger +Macedonian +Malagasy +Malawian +Malaysian +Maldivian +Malian +Maltese +Marshallese +Mauritanian +Mauritian +Mexican +Micronesian +Moldovan +Monacan +Mongolian +Montenegrin +Moroccan +Mosotho +Motswana +Mozambican +Namibian +Nauruan +Nepalese +New Zealander +Nicaraguan +Nigerian +Nigerien +North Korean +Northern Irish +Norwegian +Omani +Pakistani +Palauan +Palestinian +Panamanian +Papua New Guinean +Paraguayan +Peruvian +Polish +Portuguese +Qatari +Romanian +Russian +Rwandan +Saint Lucian +Salvadoran +Samoan +San
Marinese +Sao Tomean +Saudi +Scottish +Senegalese +Serbian +Seychellois +Sierra Leonean +Singaporean +Slovakian +Slovenian +Solomon Islander +Somali +South African +South Korean +Spanish +Sri Lankan +Sudanese +Surinamer +Swazi +Swedish +Swiss +Syrian +Taiwanese +Tajik +Tanzanian +Thai +Togolese +Tongan +Trinidadian or Tobagonian +Tunisian +Turkish +Tuvaluan +Ugandan +Ukrainian +Uruguayan +Uzbekistani +Vanuatuan +Venezuelan +Vietnamese +Welsh +Yemenite +Zambian +Zimbabwean
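
Note on how these wildcard files are consumed: each wildcards/*.txt file is a plain newline-delimited list, and a token of the form __name__ in a prompt is replaced with a randomly chosen line from wildcards/name.txt. Because color_flower.txt itself contains __color__ __flower__, substitution has to be applied recursively, with a depth limit so that cyclic references cannot loop forever. The sketch below is a minimal illustration of that mechanism, not Fooocus's actual implementation; the expand_wildcards helper, the TOKEN regex, and the MAX_DEPTH limit are illustrative assumptions.

import random
import re
from pathlib import Path

WILDCARDS_DIR = Path("wildcards")    # the directory added by this diff
TOKEN = re.compile(r"__([\w-]+)__")  # matches tokens such as __color__
MAX_DEPTH = 10                       # guards against cyclic wildcard files

def expand_wildcards(prompt: str, rng: random.Random, depth: int = 0) -> str:
    """Hypothetical helper: replace every __name__ token with a random
    line from wildcards/name.txt, recursing so nested tokens expand too."""
    if depth >= MAX_DEPTH:
        return prompt

    def substitute(match: re.Match) -> str:
        path = WILDCARDS_DIR / (match.group(1) + ".txt")
        if not path.is_file():
            return match.group(0)  # unknown wildcard: keep the token as-is
        lines = [ln.strip() for ln in path.read_text(encoding="utf-8").splitlines() if ln.strip()]
        return rng.choice(lines) if lines else match.group(0)

    expanded = TOKEN.sub(substitute, prompt)
    if expanded != prompt and TOKEN.search(expanded):
        # a substitution introduced new tokens (color_flower.txt does this), so recurse
        return expand_wildcards(expanded, rng, depth + 1)
    return expanded

if __name__ == "__main__":
    # prints something like: a crimson Peony in a vase
    print(expand_wildcards("a __color_flower__ in a vase", random.Random()))

Under these assumptions, __color_flower__ first expands to __color__ __flower__ and then to a concrete pair such as "teal Freesia", which is why the color and flower lists are kept as separate single-purpose files.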